[ 494.071268] env[62346]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=62346) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.071720] env[62346]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=62346) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.071720] env[62346]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=62346) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.072053] env[62346]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 494.164817] env[62346]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62346) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 494.176139] env[62346]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62346) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 494.323678] env[62346]: INFO nova.virt.driver [None req-e39a1ed0-305d-417d-ac2c-6a625264f8e2 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 494.400347] env[62346]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.400525] env[62346]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.400612] env[62346]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62346) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 497.585381] env[62346]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-a92ccf1f-0bec-4882-a356-6d1b9a492499 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.601743] env[62346]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62346) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 497.601913] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-71679aee-ae62-4cbb-b06e-7448f471d122 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.643783] env[62346]: INFO oslo_vmware.api [-] Successfully established new session; session ID is c8e4e.
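The three "Loaded VIF plugin class" records above come from os_vif's initialize() (the os_vif/__init__.py:44 frames), which discovers VIF plugins through stevedore entry points. A minimal sketch of that discovery pattern follows; the 'os_vif' entry-point namespace is an assumption on my part, and nothing below is Nova or os-vif source:

    # Sketch only: stevedore-based plugin discovery of the kind os-vif uses.
    # Assumes stevedore and the plugin packages from the log are installed.
    from stevedore import extension

    # 'os_vif' is the assumed entry-point namespace the plugins register under.
    mgr = extension.ExtensionManager(namespace='os_vif', invoke_on_load=False)

    # With the plugins from the log installed, this should print
    # ['linux_bridge', 'noop', 'ovs'], matching the INFO record above.
    print(sorted(ext.name for ext in mgr))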
[ 497.643949] env[62346]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.243s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 497.644529] env[62346]: INFO nova.virt.vmwareapi.driver [None req-e39a1ed0-305d-417d-ac2c-6a625264f8e2 None None] VMware vCenter version: 7.0.3
[ 497.648022] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97816cf6-3a49-490b-8f6f-332ea14b7ef6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.666213] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b878da8-45cf-4e90-9ecc-254e7af8e260 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.672713] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c937461-cb79-431a-97b5-8c8b5046e821 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.679874] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e358215-dada-4589-92a8-5ccfb9f576a4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.693106] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f3531b-b83b-48ab-b696-4c0e48897347 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.699654] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e49866f-4b8f-4f1f-942e-1c1dae6adfd6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.731195] env[62346]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-e931f1e1-16aa-4c81-b48e-12ac905d7462 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 497.737507] env[62346]: DEBUG nova.virt.vmwareapi.driver [None req-e39a1ed0-305d-417d-ac2c-6a625264f8e2 None None] Extension org.openstack.compute already exists. {{(pid=62346) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 497.740268] env[62346]: INFO nova.compute.provider_config [None req-e39a1ed0-305d-417d-ac2c-6a625264f8e2 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
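The session setup traced above (suds client creation, ServiceInstance.RetrieveServiceContent, SessionManager.Login, then the vCenter version report) is driven by oslo.vmware. A minimal sketch of opening such a session; only the vCenter host name is taken from the log, while the credentials and tuning values are placeholders:

    # Sketch only: establishing a vCenter session with oslo.vmware.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',  # vCenter host, from the log
        'username', 'password',          # placeholder credentials
        api_retry_count=10,              # illustrative retry count
        task_poll_interval=0.5)          # illustrative poll interval

    # Constructing the session performs the SessionManager.Login invocation
    # logged above; as the lockutils records show, oslo.vmware serializes
    # _create_session under "oslo_vmware_api_lock".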
[ 497.760609] env[62346]: DEBUG nova.context [None req-e39a1ed0-305d-417d-ac2c-6a625264f8e2 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),5d6c13b1-6bdb-4e01-b4b3-0719036b72d6(cell1) {{(pid=62346) load_cells /opt/stack/nova/nova/context.py:464}}
[ 497.762833] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 497.763099] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 497.763829] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 497.764296] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Acquiring lock "5d6c13b1-6bdb-4e01-b4b3-0719036b72d6" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 497.764492] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Lock "5d6c13b1-6bdb-4e01-b4b3-0719036b72d6" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 497.765553] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Lock "5d6c13b1-6bdb-4e01-b4b3-0719036b72d6" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 497.786580] env[62346]: INFO dbcounter [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Registered counter for database nova_cell0
[ 497.794705] env[62346]: INFO dbcounter [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Registered counter for database nova_cell1
[ 497.797853] env[62346]: DEBUG oslo_db.sqlalchemy.engines [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62346) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 497.798225] env[62346]: DEBUG oslo_db.sqlalchemy.engines [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62346) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 497.802939] env[62346]: DEBUG dbcounter [-] [62346] Writer thread running {{(pid=62346) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 497.803767] env[62346]: DEBUG dbcounter [-] [62346] Writer thread running {{(pid=62346) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 497.805989] env[62346]: ERROR nova.db.main.api [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 497.805989] env[62346]: result = function(*args, **kwargs)
[ 497.805989] env[62346]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 497.805989] env[62346]: return func(*args, **kwargs)
[ 497.805989] env[62346]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 497.805989] env[62346]: result = fn(*args, **kwargs)
[ 497.805989] env[62346]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 497.805989] env[62346]: return f(*args, **kwargs)
[ 497.805989] env[62346]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 497.805989] env[62346]: return db.service_get_minimum_version(context, binaries)
[ 497.805989] env[62346]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 497.805989] env[62346]: _check_db_access()
[ 497.805989] env[62346]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 497.805989] env[62346]: stacktrace = ''.join(traceback.format_stack())
[ 497.805989] env[62346]:
[ 497.807173] env[62346]: ERROR nova.db.main.api [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 497.807173] env[62346]: result = function(*args, **kwargs)
[ 497.807173] env[62346]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 497.807173] env[62346]: return func(*args, **kwargs)
[ 497.807173] env[62346]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 497.807173] env[62346]: result = fn(*args, **kwargs)
[ 497.807173] env[62346]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 497.807173] env[62346]: return f(*args, **kwargs)
[ 497.807173] env[62346]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 497.807173] env[62346]: return db.service_get_minimum_version(context, binaries)
[ 497.807173] env[62346]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 497.807173] env[62346]: _check_db_access()
[ 497.807173] env[62346]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 497.807173] env[62346]: stacktrace = ''.join(traceback.format_stack())
[ 497.807173] env[62346]:
[ 497.807782] env[62346]: WARNING nova.objects.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Failed to get minimum service version for cell 5d6c13b1-6bdb-4e01-b4b3-0719036b72d6
[ 497.807782] env[62346]: WARNING nova.objects.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 497.808154] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Acquiring lock "singleton_lock" {{(pid=62346) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 497.808328] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Acquired lock "singleton_lock" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 497.808586] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Releasing lock "singleton_lock" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 497.808915] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Full set of CONF: {{(pid=62346) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 497.809076] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ******************************************************************************** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 497.809209] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] Configuration options gathered from: {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 497.809352] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 497.809549] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 497.809681] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ================================================================================ {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 497.809898] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] allow_resize_to_same_host = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.810087] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] arq_binding_timeout = 300 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.810224] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] backdoor_port = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.810354] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] backdoor_socket = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.810520] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] block_device_allocate_retries = 60 {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.810683] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] block_device_allocate_retries_interval = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.810858] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cert = self.pem {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.811044] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.811220] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute_monitors = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.811386] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] config_dir = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.811557] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] config_drive_format = iso9660 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.811695] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.811863] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] config_source = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.812047] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] console_host = devstack {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.812221] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] control_exchange = nova {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.812383] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cpu_allocation_ratio = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.812595] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] daemon = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.812794] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] debug = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.812965] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] default_access_ip_network_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.813149] 
env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] default_availability_zone = nova {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.813310] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] default_ephemeral_format = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.813473] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] default_green_pool_size = 1000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.813708] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.813916] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] default_schedule_zone = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.814098] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] disk_allocation_ratio = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.814269] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] enable_new_services = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.814454] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] enabled_apis = ['osapi_compute'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.814628] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] enabled_ssl_apis = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.814833] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] flat_injected = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.815029] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] force_config_drive = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.815198] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] force_raw_images = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.815374] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe 
None None] graceful_shutdown_timeout = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.815540] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] heal_instance_info_cache_interval = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.815765] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] host = cpu-1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.815948] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.816147] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] initial_disk_allocation_ratio = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.816325] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] initial_ram_allocation_ratio = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.816545] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.816714] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] instance_build_timeout = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.816879] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] instance_delete_interval = 300 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.817065] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] instance_format = [instance: %(uuid)s] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.817297] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] instance_name_template = instance-%08x {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.817485] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] instance_usage_audit = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.817666] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] instance_usage_audit_period = month {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.817838] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.818015] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.818194] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] internal_service_availability_zone = internal {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.818353] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] key = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.818516] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] live_migration_retry_count = 30 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.818681] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_config_append = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.818850] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.819024] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_dir = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.819188] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.819319] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_options = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.819484] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_rotate_interval = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.819655] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_rotate_interval_type = days {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.819823] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] log_rotation_type = none {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.819955] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.820095] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.820271] env[62346]: DEBUG 
oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.820439] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.820570] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.820735] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] long_rpc_timeout = 1800 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.820894] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] max_concurrent_builds = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.821103] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] max_concurrent_live_migrations = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.821270] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] max_concurrent_snapshots = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.821429] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] max_local_block_devices = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.821584] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] max_logfile_count = 30 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.821742] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] max_logfile_size_mb = 200 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.821901] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] maximum_instance_delete_attempts = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.822086] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] metadata_listen = 0.0.0.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.822262] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] metadata_listen_port = 8775 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.822433] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] metadata_workers = 2 {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.822596] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] migrate_max_retries = -1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.822767] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] mkisofs_cmd = genisoimage {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.822979] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] my_block_storage_ip = 10.180.1.21 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.823127] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] my_ip = 10.180.1.21 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.823295] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] network_allocate_retries = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.823482] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.823702] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] osapi_compute_listen = 0.0.0.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.823910] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] osapi_compute_listen_port = 8774 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.824106] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] osapi_compute_unique_server_name_scope = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.824286] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] osapi_compute_workers = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.824450] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] password_length = 12 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.824614] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] periodic_enable = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.824797] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] periodic_fuzzy_delay = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.824991] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] pointer_model = usbtablet {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.825180] env[62346]: 
DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] preallocate_images = none {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.825347] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] publish_errors = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.825480] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] pybasedir = /opt/stack/nova {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.825638] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ram_allocation_ratio = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.825801] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] rate_limit_burst = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.825974] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] rate_limit_except_level = CRITICAL {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.826177] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] rate_limit_interval = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.826350] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] reboot_timeout = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.826512] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] reclaim_instance_interval = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.826674] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] record = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.826846] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] reimage_timeout_per_gb = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.827023] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] report_interval = 120 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.827190] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] rescue_timeout = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.827354] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] reserved_host_cpus = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.827515] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] reserved_host_disk_mb = 0 {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.827672] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] reserved_host_memory_mb = 512 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.827832] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] reserved_huge_pages = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.827992] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] resize_confirm_window = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.828166] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] resize_fs_using_block_device = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.828324] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] resume_guests_state_on_host_boot = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.828496] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.828659] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] rpc_response_timeout = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.828824] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] run_external_periodic_tasks = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.828997] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] running_deleted_instance_action = reap {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.829174] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] running_deleted_instance_poll_interval = 1800 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.829335] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] running_deleted_instance_timeout = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.829495] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler_instance_sync_interval = 120 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.829662] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_down_time = 720 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.829834] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] servicegroup_driver = db {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.829998] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] shelved_offload_time = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.830171] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] shelved_poll_interval = 3600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.830339] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] shutdown_timeout = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.830499] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] source_is_ipv6 = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.830658] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ssl_only = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.830912] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.831093] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] sync_power_state_interval = 600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.831259] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] sync_power_state_pool_size = 1000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.831430] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] syslog_log_facility = LOG_USER {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.831590] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] tempdir = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.831756] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] timeout_nbd = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.831929] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] transport_url = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.832106] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] update_resources_interval = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.832272] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] use_cow_images = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.832433] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe 
None None] use_eventlog = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.832595] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] use_journal = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.832754] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] use_json = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.832915] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] use_rootwrap_daemon = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.833083] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] use_stderr = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.833267] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] use_syslog = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.833498] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vcpu_pin_set = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.833688] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plugging_is_fatal = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.833889] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plugging_timeout = 300 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.834077] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] virt_mkfs = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.834270] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] volume_usage_poll_interval = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.834463] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] watch_log_file = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.834641] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] web = /usr/share/spice-html5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 497.834851] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_concurrency.disable_process_locking = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.835173] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.835361] 
env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.835562] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.835712] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.835886] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.836070] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.836259] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.auth_strategy = keystone {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.836439] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.compute_link_prefix = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.836622] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.836799] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.dhcp_domain = novalocal {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.836969] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.enable_instance_password = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.837148] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.glance_link_prefix = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.837316] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.837489] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.837653] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
api.instance_list_per_project_cells = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.837822] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.list_records_by_skipping_down_cells = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.837986] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.local_metadata_per_cell = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.838165] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.max_limit = 1000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.838334] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.metadata_cache_expiration = 15 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.838512] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.neutron_default_tenant_id = default {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.838683] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.use_neutron_default_nets = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.838855] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.839031] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.839207] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.839384] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.839556] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.vendordata_dynamic_targets = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.839725] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.vendordata_jsonfile_path = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.839909] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.840120] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe 
[ 497.840120] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.backend = dogpile.cache.memcached {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.840293] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.backend_argument = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.840469] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.config_prefix = cache.oslo {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.840642] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.dead_timeout = 60.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.840807] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.debug_cache_backend = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.840977] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.enable_retry_client = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.841156] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.enable_socket_keepalive = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.841331] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.enabled = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.841499] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.enforce_fips_mode = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.841665] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.expiration_time = 600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.841833] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.hashclient_retry_attempts = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.842007] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.hashclient_retry_delay = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.842181] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_dead_retry = 300 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.842341] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_password = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.842505] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62346)
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.842669] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.842835] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_pool_maxsize = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.842999] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.843176] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_sasl_enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.843359] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.843524] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_socket_timeout = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.843685] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.memcache_username = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.843881] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.proxies = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.844071] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.redis_password = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.844254] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.redis_sentinel_service_name = mymaster {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.844434] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.844606] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.redis_server = localhost:6379 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.844778] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.redis_socket_timeout = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.845009] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.redis_username = None {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.845221] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.retry_attempts = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.845395] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.retry_delay = 0.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.845563] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.socket_keepalive_count = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.845731] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.socket_keepalive_idle = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.845895] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.socket_keepalive_interval = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.846070] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.tls_allowed_ciphers = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.846235] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.tls_cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.846394] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.tls_certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.846557] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.tls_enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.846716] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cache.tls_keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
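
The [cache] group just dumped (backend = dogpile.cache.memcached, memcache_servers = ['localhost:11211'], expiration_time = 600, enabled = True) is oslo.cache configuration. A minimal sketch of how such a group becomes a usable dogpile.cache region, assuming oslo.cache is installed; this is the generic library pattern, not Nova's exact wiring:

    from oslo_cache import core as cache
    from oslo_config import cfg

    CONF = cfg.CONF
    cache.configure(CONF)                       # registers the [cache] options
    region = cache.create_region()
    cache.configure_cache_region(CONF, region)  # applies backend, servers, TTL

    # With enabled = True this talks to memcached at localhost:11211;
    # entries expire after expiration_time = 600 seconds.
    region.set('example-key', 'example-value')
    value = region.get('example-key')
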
[ 497.846889] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.847080] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.auth_type = password {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.847247] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.847427] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.catalog_info = volumev3::publicURL {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.847591] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.847759] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.847926] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.cross_az_attach = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.848177] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.debug = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.848378] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.endpoint_template = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.848552] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.http_retries = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.848721] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.848887] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.849078] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.os_region_name = RegionOne {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.849256] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.849422] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cinder.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.849600] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.849766] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.cpu_dedicated_set = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.849933] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.cpu_shared_set = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.850116] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.image_type_exclude_list = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 497.850284] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.850450] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.max_concurrent_disk_ops = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.850614] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.max_disk_devices_to_attach = -1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.850780] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.850955] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.851146] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.resource_provider_association_refresh = 300 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.851315] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.851483] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.shutdown_retry_interval = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.851668] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.851850] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] conductor.workers = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.852047] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] console.allowed_origins = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.852215] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] console.ssl_ciphers = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.852389] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] console.ssl_minimum_version = default {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
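
The [cinder] block above (auth_type = password, catalog_info = volumev3::publicURL, os_region_name = RegionOne) follows the standard keystoneauth1 option layout. A minimal sketch of the usual pattern for turning such a group into an authenticated session; this is the generic library usage, not Nova's exact code:

    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    CONF = cfg.CONF

    # Register auth_type/auth_section plus session options (cafile, timeout, ...)
    ks_loading.register_auth_conf_options(CONF, 'cinder')
    ks_loading.register_session_conf_options(CONF, 'cinder')

    auth = ks_loading.load_auth_from_conf_options(CONF, 'cinder')
    sess = ks_loading.load_session_from_conf_options(CONF, 'cinder', auth=auth)
    # catalog_info = volumev3::publicURL then drives endpoint selection
    # when a volume client is built on top of this session.
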
[ 497.852562] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] consoleauth.enforce_session_timeout = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.852733] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] consoleauth.token_ttl = 600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.852903] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.853074] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.853244] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.853407] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.connect_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.853567] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.connect_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.853731] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.endpoint_override = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.853930] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.854108] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.854276] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.max_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.854437] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.min_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.854596] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.region_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.854760] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.retriable_status_codes = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.854945] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.service_name = None {{(pid=62346) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.855151] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.service_type = accelerator {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.855324] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.855487] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.status_code_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.855648] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.status_code_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.855814] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.856011] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.856209] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] cyborg.version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.856406] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.backend = sqlalchemy {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.856584] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.connection = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.856755] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.connection_debug = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.856934] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.connection_parameters = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.857113] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.connection_recycle_time = 3600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.857284] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.connection_trace = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.857452] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.db_inc_retry_interval = True {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.857619] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.db_max_retries = 20 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.857784] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.db_max_retry_interval = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.857950] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.db_retry_interval = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.858126] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.max_overflow = 50 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.858290] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.max_pool_size = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.858454] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.max_retries = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.858630] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.858791] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.mysql_wsrep_sync_wait = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.858954] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.pool_timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.859131] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.retry_interval = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.859293] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.slave_connection = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.859455] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.sqlite_synchronous = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.859620] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] database.use_db_reconnect = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
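
The [database] values above are oslo.db options that ultimately become SQLAlchemy engine and pool settings. Roughly what max_pool_size = 5, max_overflow = 50 and connection_recycle_time = 3600 translate to; illustrative only, since the real connection URL is masked as **** above and the service goes through oslo.db's enginefacade rather than calling create_engine() directly:

    import sqlalchemy

    engine = sqlalchemy.create_engine(
        'mysql+pymysql://nova:secret@127.0.0.1/nova',  # hypothetical URL
        pool_size=5,        # database.max_pool_size
        max_overflow=50,    # database.max_overflow
        pool_recycle=3600,  # database.connection_recycle_time
    )
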
[ 497.859806] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.backend = sqlalchemy {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.859983] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.connection = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.860164] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.connection_debug = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.860341] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.connection_parameters = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.860509] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.connection_recycle_time = 3600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.860676] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.connection_trace = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.860843] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.db_inc_retry_interval = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.861021] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.db_max_retries = 20 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.861192] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.db_max_retry_interval = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.861357] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.db_retry_interval = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.861520] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.max_overflow = 50 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.861683] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.max_pool_size = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.861846] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.max_retries = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.862029] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.862196] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.862356]
env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.pool_timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.862520] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.retry_interval = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.862683] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.slave_connection = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.862847] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] api_database.sqlite_synchronous = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.863035] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] devices.enabled_mdev_types = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.863221] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.863396] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ephemeral_storage_encryption.default_format = luks {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.863561] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ephemeral_storage_encryption.enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.863725] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.863927] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.api_servers = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.864111] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.864277] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.864440] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.864598] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.connect_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.864756] env[62346]: DEBUG 
oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.connect_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.864941] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.debug = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.865128] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.default_trusted_certificate_ids = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.865299] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.enable_certificate_validation = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.865498] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.enable_rbd_download = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.865721] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.endpoint_override = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.865907] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.866086] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.866252] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.max_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.866409] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.min_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.866571] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.num_retries = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.866743] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.rbd_ceph_conf = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.866920] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.rbd_connect_timeout = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.867142] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.rbd_pool = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.867320] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.rbd_user = {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.867485] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.region_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.867650] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.retriable_status_codes = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.867814] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.service_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.867990] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.service_type = image {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.868171] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.868333] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.status_code_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.868493] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.status_code_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.868652] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.868835] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.869011] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.verify_glance_signatures = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.869181] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] glance.version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.869351] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] guestfs.debug = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.869525] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] mks.enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.869900] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
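
In the [glance] block above, service_type = image, valid_interfaces = ['internal', 'public'] and region_name are keystoneauth adapter options: they control how the image endpoint is picked out of the Keystone catalog. A sketch of the generic adapter pattern; the unauthenticated Session is for illustration only, and a real service would pass auth= as in the [cinder] sketch earlier:

    from keystoneauth1 import loading as ks_loading
    from keystoneauth1 import session as ks_session
    from oslo_config import cfg

    CONF = cfg.CONF
    ks_loading.register_adapter_conf_options(CONF, 'glance')

    sess = ks_session.Session()  # illustrative; normally built with auth=
    adapter = ks_loading.load_adapter_from_conf_options(CONF, 'glance',
                                                        session=sess)
    # adapter.get('/v2/images') resolves the endpoint via service_type=image
    # and the internal/public interfaces from valid_interfaces.
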
[ 497.870110] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] image_cache.manager_interval = 2400 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.870287] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] image_cache.precache_concurrency = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.870461] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] image_cache.remove_unused_base_images = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.870633] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.870803] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.870985] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] image_cache.subdirectory_name = _base {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.871179] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.api_max_retries = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.871348] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.api_retry_interval = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.871512] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.871680] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.auth_type = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.871844] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.872012] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.872184] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.872347] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.conductor_group = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.872507] env[62346]: DEBUG
oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.connect_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.872665] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.connect_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.872823] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.endpoint_override = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.872989] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.873162] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.873322] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.max_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.873479] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.min_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.873644] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.peer_list = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.873836] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.region_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.874010] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.retriable_status_codes = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.874184] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.serial_console_state_timeout = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.874347] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.service_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.874520] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.service_type = baremetal {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.874684] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.shard = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.874864] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.split_loggers = False {{(pid=62346) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.875064] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.status_code_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.875237] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.status_code_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.875399] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.875583] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.875748] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ironic.version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.875935] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.876144] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] key_manager.fixed_key = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.876348] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.876516] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.barbican_api_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.876679] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.barbican_endpoint = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.876856] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.barbican_endpoint_type = public {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.877119] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.barbican_region_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.877403] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.877695] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.certfile = None {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.877996] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.878300] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.878585] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.878876] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.number_of_retries = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.879178] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.retry_delay = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.879465] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.send_service_user_token = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.879745] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.880039] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.880331] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.verify_ssl = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.880629] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican.verify_ssl_path = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
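
The [key_manager] and [barbican] groups above are castellan configuration: key_manager.backend names the implementation (here Nova's ConfKeyManager with a masked fixed_key; the barbican.* options would apply if the Barbican backend were selected instead). A minimal sketch of how a service obtains a key manager through castellan, assuming castellan is installed; not Nova's exact wiring:

    from castellan import key_manager
    from oslo_config import cfg

    CONF = cfg.CONF
    # Instantiates whatever [key_manager] backend points at, e.g. the
    # nova.keymgr.conf_key_mgr.ConfKeyManager named in the dump above.
    manager = key_manager.API(CONF)
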
[ 497.880931] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.881243] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.auth_type = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.881534] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.881819] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.882132] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.882433] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.882724] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.883038] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.883342] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] barbican_service_user.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.883649] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.approle_role_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.883953] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.approle_secret_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.884269] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.884579] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.884901] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.885227] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.885528] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.885854] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.kv_mountpoint = secret {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.886178] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.kv_path = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
None] vault.kv_version = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.886805] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.namespace = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.887128] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.root_token_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.887450] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.887758] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.ssl_ca_crt_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.888092] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.888406] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.use_ssl = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.888717] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.889057] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.889387] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.auth_type = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.889696] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.890013] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.890356] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.890674] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.connect_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.890987] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.connect_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
497.891325] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.endpoint_override = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.891645] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.891981] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.892312] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.max_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.892632] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.min_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.892953] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.region_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.893287] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.retriable_status_codes = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.893607] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.service_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.893952] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.service_type = identity {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.894289] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.894600] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.status_code_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.894925] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.status_code_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.895252] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.895595] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.895905] env[62346]: DEBUG oslo_service.service [None 
req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] keystone.version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.896287] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.connection_uri = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.896593] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.cpu_mode = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.896907] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.cpu_model_extra_flags = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.897246] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.cpu_models = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.897582] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.cpu_power_governor_high = performance {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.897920] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.cpu_power_governor_low = powersave {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.898252] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.cpu_power_management = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.898589] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.898917] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.device_detach_attempts = 8 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.899241] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.device_detach_timeout = 20 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.899560] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.disk_cachemodes = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.899876] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.disk_prefix = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.900220] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.enabled_perf_events = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.900545] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
libvirt.file_backed_memory = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.900874] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.gid_maps = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.901226] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.hw_disk_discard = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.901567] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.hw_machine_type = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.901912] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.images_rbd_ceph_conf = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.902261] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.902544] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.902846] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.images_rbd_glance_store_name = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.903151] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.images_rbd_pool = rbd {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.903462] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.images_type = default {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.903782] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.images_volume_group = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.904084] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.inject_key = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.904434] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.inject_partition = -2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.904749] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.inject_password = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.905077] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.iscsi_iface = None {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.905377] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.iser_use_multipath = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.905694] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_bandwidth = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.906015] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.906336] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_downtime = 500 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.906645] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.906948] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.907277] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_inbound_addr = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.907590] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.907899] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_permit_post_copy = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.908227] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_scheme = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.908563] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_timeout_action = abort {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.908884] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_tunnelled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.909213] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.live_migration_uri = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.909530] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.909846] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.max_queues = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.910170] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.910585] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.910901] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.nfs_mount_options = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.911430] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.911769] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.912062] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.num_iser_scan_tries = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.912356] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.num_memory_encrypted_guests = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.912681] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.912988] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.num_pcie_ports = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.913329] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.num_volume_scan_tries = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.913647] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.pmem_namespaces = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.913970] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.quobyte_client_cfg = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.914474] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.914791] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rbd_connect_timeout = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.915125] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.915446] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.915760] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rbd_secret_uuid = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.916084] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rbd_user = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.916407] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.916732] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.remote_filesystem_transport = ssh {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.917060] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rescue_image_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.917367] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rescue_kernel_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.917664] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rescue_ramdisk_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.917996] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.918334] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.rx_queue_size = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.918664] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.smbfs_mount_options = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.919170] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.919496] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.snapshot_compression = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.919810] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.snapshot_image_format = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.920219] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.920538] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.sparse_logical_volumes = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.920855] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.swtpm_enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.921197] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.swtpm_group = tss {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.921516] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.swtpm_user = tss {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.922067] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.sysinfo_serial = unique {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.922204] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.tb_cache_size = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.922495] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.tx_queue_size = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.922819] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.uid_maps = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.923157] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.use_virtio_for_bridges = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.923495] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.virt_type = kvm {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.923847] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.volume_clear = zero 
{{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.924182] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.volume_clear_size = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.924519] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.volume_use_multipath = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.924835] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.vzstorage_cache_path = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.925166] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.925471] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.vzstorage_mount_group = qemu {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.925770] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.vzstorage_mount_opts = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.926116] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.926590] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.926911] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.vzstorage_mount_user = stack {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.927250] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.927574] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.927897] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.auth_type = password {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.928196] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.928500] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.certfile = None 
{{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.928795] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.929108] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.connect_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.929403] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.connect_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.929717] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.default_floating_pool = public {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.930015] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.endpoint_override = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.930322] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.extension_sync_interval = 600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.930627] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.http_retries = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.930940] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.931264] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.931572] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.max_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.931898] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.932217] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.min_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.932524] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.ovs_bridge = br-int {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.932830] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.physnets = [] {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.933161] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.region_name = RegionOne {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.933476] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.retriable_status_codes = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.933811] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.service_metadata_proxy = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.934126] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.service_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.934440] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.service_type = network {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.934747] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.935068] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.status_code_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.935367] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.status_code_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.935675] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.936029] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.936338] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] neutron.version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.936668] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] notifications.bdms_in_notifications = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.937023] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] notifications.default_level = INFO {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.937362] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] notifications.notification_format = unversioned {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.937679] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] notifications.notify_on_state_change = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.938018] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.938358] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] pci.alias = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.938682] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] pci.device_spec = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.938983] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] pci.report_in_placement = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.939329] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.939656] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.auth_type = password {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.939983] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.940310] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.940623] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.940943] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.941277] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.connect_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.941589] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.connect_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.941904] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.default_domain_id = None {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.942254] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.default_domain_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.942561] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.domain_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.942874] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.domain_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.943186] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.endpoint_override = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.943500] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.943820] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.944146] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.max_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.944443] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.min_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.944768] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.password = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.945305] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.project_domain_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.945446] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.project_domain_name = Default {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.945737] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.project_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.946071] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.project_name = service {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.946404] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.region_name = RegionOne {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.946709] 
env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.retriable_status_codes = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.947043] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.service_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.947375] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.service_type = placement {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.947680] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.947981] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.status_code_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.948298] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.status_code_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.948603] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.system_scope = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.948901] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.949217] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.trust_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.949532] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.user_domain_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.949840] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.user_domain_name = Default {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.950148] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.user_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.950473] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.username = placement {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.950797] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.951106] env[62346]: DEBUG oslo_service.service [None 
req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] placement.version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.951427] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.cores = 20 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.951729] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.count_usage_from_placement = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.952056] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.952394] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.injected_file_content_bytes = 10240 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.952711] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.injected_file_path_length = 255 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.953046] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.injected_files = 5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.953373] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.instances = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.953674] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.key_pairs = 100 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.953996] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.metadata_items = 128 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.954324] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.ram = 51200 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.954646] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.recheck_quota = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.954969] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.server_group_members = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.955307] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] quota.server_groups = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.955628] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.955949] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.956269] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.image_metadata_prefilter = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.956587] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.956921] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.max_attempts = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.957266] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.max_placement_results = 1000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.957587] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.957890] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.query_placement_for_image_type_support = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.958210] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.958538] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] scheduler.workers = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.958855] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.959182] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.959504] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.959822] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.960160] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.960495] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.960835] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.961207] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.961530] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.host_subset_size = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.961832] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.962129] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.962448] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.962774] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.isolated_hosts = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.963107] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.isolated_images = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.963411] env[62346]: DEBUG oslo_service.service [None 
req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.963737] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.964074] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.964400] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.pci_in_placement = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.964723] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.965056] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.965376] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.965670] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.965961] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.966308] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.966629] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.track_instance_changes = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.966969] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.967340] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] metrics.required = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.967609] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] metrics.weight_multiplier = 1.0 
{{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.967874] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.968175] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] metrics.weight_setting = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.968680] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.969013] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] serial_console.enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.969360] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] serial_console.port_range = 10000:20000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.969687] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.970016] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.970349] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] serial_console.serialproxy_port = 6083 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.970670] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.970998] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.auth_type = password {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.971327] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.971634] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.971936] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.972257] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.insecure = False {{(pid=62346) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.972570] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.972896] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.send_service_user_token = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.973223] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.973531] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] service_user.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.973882] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.agent_enabled = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.974191] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.974714] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.975059] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.975357] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.html5proxy_port = 6082 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.975654] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.image_compression = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.975929] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.jpeg_compression = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.976244] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.playback_compression = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.976542] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.server_listen = 127.0.0.1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.976838] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.977149] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.streaming_mode = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.977449] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] spice.zlib_compression = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.977771] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] upgrade_levels.baseapi = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.978108] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] upgrade_levels.compute = auto {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.978431] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] upgrade_levels.conductor = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.978731] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] upgrade_levels.scheduler = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.979048] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.979372] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.auth_type = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.979685] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.979994] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.980325] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.980642] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.980956] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.keyfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.981283] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.981589] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vendordata_dynamic_auth.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.981897] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.api_retry_count = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.982202] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.ca_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.982522] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.cache_prefix = devstack-image-cache {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.982831] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.cluster_name = testcl1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.983165] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.connection_pool_size = 10 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.983476] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.console_delay_seconds = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.983815] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.datastore_regex = ^datastore.* {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.984202] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.984532] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.host_password = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.984844] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.host_port = 443 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.985172] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.host_username = administrator@vsphere.local {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.985502] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.insecure = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.985819] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.integration_bridge = None {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.986152] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.maximum_objects = 100 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.986547] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.pbm_default_policy = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.986897] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.pbm_enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.987134] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.pbm_wsdl_location = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.987440] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.987722] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.serial_port_proxy_uri = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.988023] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.serial_port_service_uri = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.988328] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.task_poll_interval = 0.5 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.988647] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.use_linked_clone = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.988967] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.vnc_keymap = en-us {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.989298] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.vnc_port = 5900 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.989613] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vmware.vnc_port_total = 10000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.989948] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.auth_schemes = ['none'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.990277] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.990814] env[62346]: 
DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.991163] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.991486] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.novncproxy_port = 6080 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.991816] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.server_listen = 127.0.0.1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.992150] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.992457] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.vencrypt_ca_certs = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.992769] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.vencrypt_client_cert = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.993111] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vnc.vencrypt_client_key = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.993453] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.993784] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.disable_deep_image_inspection = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.994113] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.994434] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.994750] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.995076] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.disable_rootwrap = False {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.995397] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.enable_numa_live_migration = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.995721] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.996081] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.996403] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.996708] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.libvirt_disable_apic = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.997024] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.997333] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.997645] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.997973] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.998310] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.998619] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.998934] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.999249] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
497.999548] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 497.999876] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.000244] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.000572] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.client_socket_timeout = 900 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.000899] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.default_pool_size = 1000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.001254] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.keep_alive = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.001585] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.max_header_line = 16384 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.001902] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.secure_proxy_ssl_header = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.002241] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.ssl_ca_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.002561] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.ssl_cert_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.002875] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.ssl_key_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.003203] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.tcp_keepidle = 600 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.003521] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.003826] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] zvm.ca_file = None {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.004122] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] zvm.cloud_connector_url = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.004613] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.004914] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] zvm.reachable_timeout = 300 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.005244] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.enforce_new_defaults = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.005547] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.enforce_scope = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.005853] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.policy_default_rule = default {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.006193] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.006506] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.policy_file = policy.yaml {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.006815] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.007132] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.007425] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.007733] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.008063] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.008580] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.008721] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.009054] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.connection_string = messaging:// {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.009565] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.enabled = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.009612] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.es_doc_type = notification {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.009904] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.es_scroll_size = 10000 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.010229] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.es_scroll_time = 2m {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.010500] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.filter_error_trace = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.010763] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.hmac_keys = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.011054] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.sentinel_service_name = mymaster {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.011340] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.socket_timeout = 0.1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.011606] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.trace_requests = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.011879] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler.trace_sqlalchemy = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.012203] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler_jaeger.process_tags = {} {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.012485] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] 
profiler_jaeger.service_name_prefix = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.012766] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] profiler_otlp.service_name_prefix = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.013069] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] remote_debug.host = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.013350] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] remote_debug.port = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.013673] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.013974] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.014265] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.014548] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.014829] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.015125] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.015407] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.015696] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.015982] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.016288] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.016557] env[62346]: 
DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.016841] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.017142] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.017439] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.017732] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.018037] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.018333] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.018631] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.018926] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.019236] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.019530] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.019834] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.020158] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.020462] env[62346]: DEBUG oslo_service.service [None 
req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.020750] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.021051] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.021336] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.021626] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.021939] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.022261] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.ssl = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.022583] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.022903] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.023219] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.023524] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.023847] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.ssl_version = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.024148] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.024494] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.024838] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_notifications.retry = -1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.025179] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.025516] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_messaging_notifications.transport_url = **** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.025834] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.auth_section = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.026153] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.auth_type = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.026456] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.cafile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.026755] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.certfile = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.027067] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.collect_timing = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.027367] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.connect_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.027654] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.connect_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.027941] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.endpoint_id = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.028263] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.endpoint_override = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.028575] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.insecure = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.028882] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.keyfile = None {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.029209] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.max_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.029518] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.min_version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.029817] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.region_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.030136] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.retriable_status_codes = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.030437] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.service_name = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.030734] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.service_type = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.031061] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.split_loggers = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.031364] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.status_code_retries = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.031660] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.status_code_retry_delay = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.031990] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.timeout = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.032273] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.valid_interfaces = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.032527] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_limit.version = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.032819] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_reports.file_event_handler = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.033140] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62346) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.033461] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] oslo_reports.log_dir = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.033790] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.034109] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.034414] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.034725] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.035036] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.035330] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.035639] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.035951] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_ovs_privileged.group = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.036264] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.036584] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.036899] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.037221] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] vif_plug_ovs_privileged.user = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.037537] env[62346]: DEBUG oslo_service.service 
[None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.flat_interface = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.037877] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.038225] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.038558] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.038885] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.039213] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.039531] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.039838] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.040193] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.040525] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_ovs.isolate_vif = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.040856] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.041203] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.041532] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.041874] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_ovs.ovsdb_interface = native {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 498.042188] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_vif_ovs.per_port_bridge = False {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.042501] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_brick.lock_path = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.042815] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.043144] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.043471] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] privsep_osbrick.capabilities = [21] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.043774] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] privsep_osbrick.group = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.044053] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] privsep_osbrick.helper_command = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.044371] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.044667] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.044971] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] privsep_osbrick.user = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.045308] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.045597] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] nova_sys_admin.group = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.045894] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] nova_sys_admin.helper_command = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.046211] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.046509] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.046796] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] nova_sys_admin.user = None {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.047046] env[62346]: DEBUG oslo_service.service [None req-8d4ea7fd-9721-40ee-87eb-2610cb0981fe None None] ******************************************************************************** {{(pid=62346) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 498.048152] env[62346]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 498.059559] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Getting list of instances from cluster (obj){ [ 498.059559] env[62346]: value = "domain-c8" [ 498.059559] env[62346]: _type = "ClusterComputeResource" [ 498.059559] env[62346]: } {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 498.061592] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41284b65-2afd-4f94-a4ff-7e0bb6df22c9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.074955] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Got total of 0 instances {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 498.075855] env[62346]: WARNING nova.virt.vmwareapi.driver [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 498.076597] env[62346]: INFO nova.virt.node [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Generated node identity 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c [ 498.076972] env[62346]: INFO nova.virt.node [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Wrote node identity 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c to /opt/stack/data/n-cpu-1/compute_id [ 498.090398] env[62346]: WARNING nova.compute.manager [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Compute nodes ['50caa86a-fe85-4e00-831f-9ba6f7fe3d1c'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 498.126479] env[62346]: INFO nova.compute.manager [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 498.170220] env[62346]: WARNING nova.compute.manager [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
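The long run of `log_opt_values` lines that ends above is oslo.config dumping every registered option when the service starts: oslo.service calls `ConfigOpts.log_opt_values()`, which emits one DEBUG line per option and closes with the row of asterisks seen at `cfg.py:2624`. A minimal sketch of that mechanism, using only the public oslo.config API — the group and option names mirror the log, but the registration code itself is illustrative, not Nova's own:

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF
    CONF.register_opts(
        [cfg.IntOpt('thread_pool_size', default=8,
                    help='Worker threads in the privsep daemon.'),
         cfg.StrOpt('logger_name', default='oslo_privsep.daemon',
                    help='Logger the privsep daemon logs under.'),
         cfg.StrOpt('user',
                    help='User the privsep daemon drops to.')],
        group='vif_plug_ovs_privileged')

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF([])                                 # parse an (empty) command line
    CONF.log_opt_values(LOG, logging.DEBUG)  # one DEBUG line per option

Run as-is, this prints `vif_plug_ovs_privileged.thread_pool_size = 8`, `vif_plug_ovs_privileged.user = None`, and so on, in the same shape as the dump above.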
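The INFO lines above also show the first-start bootstrap: nova-compute generates a node identity, persists it to /opt/stack/data/n-cpu-1/compute_id, and the two ComputeHostNotFound warnings are expected because neither the host nor the node UUID exists in the database yet. A rough sketch of the read-or-create pattern, assuming a plain file store — the helper name is ours, and the real logic lives in nova.virt.node:

    import os
    import uuid

    COMPUTE_ID_FILE = '/opt/stack/data/n-cpu-1/compute_id'  # path from the log

    def get_local_node_uuid(path=COMPUTE_ID_FILE):
        # Reuse the persisted identity when the file is already there.
        if os.path.exists(path):
            with open(path) as f:
                return f.read().strip()
        # First start: mint a new UUID and write it out, matching the
        # "Generated node identity" / "Wrote node identity" INFO lines.
        node_uuid = str(uuid.uuid4())
        with open(path, 'w') as f:
            f.write(node_uuid)
        return node_uuid

Because the identity is stable across restarts, later service starts skip the "first time this service is starting" warnings.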
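Once the compute node record is created, the resource tracker reports an inventory dict to Placement (it appears a few lines below). Placement's usable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps any single allocation regardless of that headroom. A worked check against the values from this log — the `usable` helper is illustrative, not a Placement API:

    # Inventory as reported to Placement later in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,
                      'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512,
                      'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,
                      'allocation_ratio': 1.0, 'max_unit': 96},
    }

    def usable(inv):
        # Capacity Placement will allow allocations against overall.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(f"{rc}: usable={usable(inv):.0f}, "
              f"per-allocation cap={inv['max_unit']}")
    # VCPU: usable=192, cap 16; MEMORY_MB: usable=196078, cap 65530;
    # DISK_GB: usable=200, cap 96

So with allocation_ratio=4.0 the 48 physical vCPUs oversubscribe to 192, but no single instance can claim more than 16 of them.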
[ 498.170456] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.170657] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.170803] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 498.170950] env[62346]: DEBUG nova.compute.resource_tracker [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 498.172059] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51884fce-8e84-4b61-8d56-3119101e3eba {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.181013] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98547a9d-3f10-44a9-9372-947783161562 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.195712] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8326be1a-200e-4ce0-841d-88d3825bf80e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.202869] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1d6260-4f5a-421d-8245-e4d82065816c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.233798] env[62346]: DEBUG nova.compute.resource_tracker [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180561MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 498.233964] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.234165] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.247439] env[62346]: WARNING 
nova.compute.resource_tracker [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] No compute node record for cpu-1:50caa86a-fe85-4e00-831f-9ba6f7fe3d1c: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c could not be found. [ 498.260822] env[62346]: INFO nova.compute.resource_tracker [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c [ 498.317340] env[62346]: DEBUG nova.compute.resource_tracker [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 498.317544] env[62346]: DEBUG nova.compute.resource_tracker [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 498.424213] env[62346]: INFO nova.scheduler.client.report [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] [req-69be92b0-4126-4f52-8fa8-94a91f779f5d] Created resource provider record via placement API for resource provider with UUID 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 498.442669] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d0ca03-fbb2-493f-ad91-55af44448b7c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.451304] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf3af37-101d-4499-a39d-57376e7228d1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.483023] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b038c7-b843-4b4a-aa89-626fc4c78546 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.491276] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4b851c-38b3-4983-a3f9-80d56d4e73c3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.506102] env[62346]: DEBUG nova.compute.provider_tree [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Updating inventory in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 498.548684] env[62346]: DEBUG nova.scheduler.client.report [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Updated inventory for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with generation 0 in Placement from set_inventory_for_provider using 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 498.548923] env[62346]: DEBUG nova.compute.provider_tree [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Updating resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c generation from 0 to 1 during operation: update_inventory {{(pid=62346) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 498.549080] env[62346]: DEBUG nova.compute.provider_tree [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Updating inventory in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 498.596139] env[62346]: DEBUG nova.compute.provider_tree [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Updating resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c generation from 1 to 2 during operation: update_traits {{(pid=62346) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 498.614615] env[62346]: DEBUG nova.compute.resource_tracker [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 498.614892] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.381s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 498.615090] env[62346]: DEBUG nova.service [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Creating RPC server for service compute {{(pid=62346) start /opt/stack/nova/nova/service.py:182}} [ 498.628806] env[62346]: DEBUG nova.service [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] Join ServiceGroup membership for this service compute {{(pid=62346) start /opt/stack/nova/nova/service.py:199}} [ 498.629066] env[62346]: DEBUG nova.servicegroup.drivers.db [None req-f25d7c9a-9276-43ec-9933-7cb0e778a1f8 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62346) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 507.804861] env[62346]: DEBUG dbcounter [-] [62346] Writing DB stats nova_cell1:SELECT=1 {{(pid=62346) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 507.805646] env[62346]: DEBUG dbcounter [-] [62346] Writing DB stats nova_cell0:SELECT=1 {{(pid=62346) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 531.632758] env[62346]: DEBUG oslo_service.periodic_task [None 
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_power_states {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 531.645194] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Getting list of instances from cluster (obj){ [ 531.645194] env[62346]: value = "domain-c8" [ 531.645194] env[62346]: _type = "ClusterComputeResource" [ 531.645194] env[62346]: } {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 531.646493] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ad4087-8f22-40f6-821d-27d453d913c0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.656391] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Got total of 0 instances {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 531.656632] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 531.656959] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Getting list of instances from cluster (obj){ [ 531.656959] env[62346]: value = "domain-c8" [ 531.656959] env[62346]: _type = "ClusterComputeResource" [ 531.656959] env[62346]: } {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 531.658155] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1819bd-2219-48c7-bd1a-360635b07328 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.668285] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Got total of 0 instances {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 540.508567] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquiring lock "d1dcf710-5f98-478f-ada7-c07bc99b6a2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.509319] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Lock "d1dcf710-5f98-478f-ada7-c07bc99b6a2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.537028] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 540.667458] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.667708] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.671451] env[62346]: INFO nova.compute.claims [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 540.821448] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b272f4e4-8d25-460b-aea4-fb0e6b4ca507 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.830289] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5b5318-7f88-4656-8978-8b66acaee9b3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.866021] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898b3735-492b-4912-ad80-750eb3bfb3f6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.878742] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b906ecd9-16cc-4496-8df5-184f7aee9503 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.896938] env[62346]: DEBUG nova.compute.provider_tree [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.910197] env[62346]: DEBUG nova.scheduler.client.report [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 540.941460] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 
tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.274s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.942016] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 540.979276] env[62346]: DEBUG nova.compute.utils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.981229] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Not allocating networking since 'none' was specified. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 540.992060] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 541.066507] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 542.518021] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 542.518021] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 542.518021] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 542.518458] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 542.518458] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 542.518458] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 542.518458] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 542.518458] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 542.518611] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 
tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 542.518611] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 542.518611] env[62346]: DEBUG nova.virt.hardware [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 542.519735] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2983a616-d287-4c41-9a77-1d920b8376fe {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.535312] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d151c895-e7a7-41c8-8d4c-9b0c27ad7937 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.561027] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1fdc8a-fa68-4516-9cf9-08c0e19791fb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.579119] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Instance VIF info [] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 542.591839] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 542.593338] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5db34b59-6edc-4ae2-97d0-c274a3dad7ad {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.609897] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Created folder: OpenStack in parent group-v4. [ 542.610470] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Creating folder: Project (3fc11f11350c4f76a6976205ccd5797f). Parent ref: group-v953204. 
{{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 542.614419] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f73faf6c-22ef-4a0e-b945-96def825e966 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.628463] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Created folder: Project (3fc11f11350c4f76a6976205ccd5797f) in parent group-v953204. [ 542.631358] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Creating folder: Instances. Parent ref: group-v953205. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 542.631358] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1421682-77e1-4bc3-8792-60dfb6589a74 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.640455] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Created folder: Instances in parent group-v953205. [ 542.641740] env[62346]: DEBUG oslo.service.loopingcall [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.641740] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 542.642094] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43d8d6b7-c309-4fbb-bd32-7a1fc2a62c83 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.665663] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 542.665663] env[62346]: value = "task-4891573" [ 542.665663] env[62346]: _type = "Task" [ 542.665663] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.677559] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891573, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.175734] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891573, 'name': CreateVM_Task, 'duration_secs': 0.285927} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.175940] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 543.176995] env[62346]: DEBUG oslo_vmware.service [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152cd1fe-15bd-4e57-8036-8decf6d73bdf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.184405] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.184594] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.185285] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 543.185585] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ee77ae9-0077-4370-929b-ac16e2a953fb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.191819] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Waiting for the task: (returnval){ [ 543.191819] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]522ba8cf-1109-c0cd-7544-77d7f60f851f" [ 543.191819] env[62346]: _type = "Task" [ 543.191819] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.205879] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]522ba8cf-1109-c0cd-7544-77d7f60f851f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.533354] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "b3cb6cbd-a508-4b62-a2b0-14d2963884fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.533354] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "b3cb6cbd-a508-4b62-a2b0-14d2963884fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.565893] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 543.652398] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.652398] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.653973] env[62346]: INFO nova.compute.claims [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.705388] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.706595] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 543.707093] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.707376] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.709017] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 543.709017] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97738c5a-583a-45ed-96d1-09ca6f122c51 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.729013] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 543.729278] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 543.730019] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150f15f0-1e61-4899-9340-cfff0e4f161d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.737940] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83340c41-ea64-4a2e-b3ad-546c47eb1357 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.748251] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Waiting for the task: (returnval){ [ 543.748251] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52eb18c7-2952-3038-6c1d-987d3cde4610" [ 543.748251] env[62346]: _type = "Task" [ 543.748251] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.757404] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52eb18c7-2952-3038-6c1d-987d3cde4610, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.805051] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30ec20f-d967-43b4-bc7c-a7956e532525 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.816375] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839e225e-f951-4e95-8b8f-15edd21a3c98 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.852417] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0504b280-a59d-4fea-b7df-1e69a7d08b45 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.860515] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c986853-ea43-42f7-8f04-8ddccfe0e371 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.876751] env[62346]: DEBUG nova.compute.provider_tree [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.889501] env[62346]: DEBUG nova.scheduler.client.report [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.915181] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.915739] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 543.988743] env[62346]: DEBUG nova.compute.utils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 543.990264] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 543.990341] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.004137] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 544.115369] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 544.172441] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.172695] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.172849] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.173785] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f 
tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.173868] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.175037] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.175345] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.175589] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.175769] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.176426] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.176426] env[62346]: DEBUG nova.virt.hardware [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.177087] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99fa54b-fad0-4d37-be36-e857d1b25de7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.190623] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe46fe9-a46a-494b-b1e8-c14e88baacab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.234101] env[62346]: DEBUG nova.policy [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '175d60934ec6468f8aefb62b6faa429f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4144444a7d254b2ba9fcedd8903bd104', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 544.240327] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquiring lock "ef410c62-2279-4be0-bb88-9fb0735eca19" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.240904] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Lock "ef410c62-2279-4be0-bb88-9fb0735eca19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.258730] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 544.259036] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Creating directory with path [datastore2] vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 544.259601] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a71e3904-d923-4de1-90e4-3a01288f1a9f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.271445] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Starting instance...
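
The lockutils entries above record both how long a caller waited for a named lock and how long it held it ("acquired ... :: waited 0.000s", "released ... :: held 0.334s" later in this section). A minimal sketch of that accounting pattern, written against stdlib threading rather than oslo_concurrency; every name here is illustrative:

    import contextlib
    import threading
    import time

    _locks = {}                 # name -> threading.Lock, created on first use
    _guard = threading.Lock()   # protects the _locks registry

    @contextlib.contextmanager
    def timed_lock(name, owner):
        # Mirrors the Acquiring / acquired :: waited / released :: held lines.
        with _guard:
            lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, owner))
        t0 = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (name, owner, acquired - t0))
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, owner, time.monotonic() - acquired))

    # e.g. with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ...
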
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 544.296427] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Created directory with path [datastore2] vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 544.296965] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Fetch image to [datastore2] vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 544.296965] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 544.297682] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215b0924-9cd0-442a-88b4-7172d0c9b424 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.309445] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34bc4b2-1a10-4564-bff9-8456273de36a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.320880] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3b9157-5a2d-4f83-b37f-00635c11531c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.372333] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926cfd43-f8ff-4da3-b775-be18bfe6f19d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.387681] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4ebb9848-fea3-44db-a4eb-246f035028d4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.413135] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.413426] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.415027] env[62346]: INFO nova.compute.claims [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.434440] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 544.544051] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 544.611613] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 544.611806] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 544.653604] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f319095-017d-4f71-965a-a5ce16f2b7e4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.662219] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3efa3d-3509-4eca-86de-7c94a3f2310c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.692807] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8862fb2a-32cc-4a6d-9911-92562d37a06d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.701197] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b740769d-0f3e-4809-9601-39151926668f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.716574] env[62346]: DEBUG nova.compute.provider_tree [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.725974] env[62346]: DEBUG nova.scheduler.client.report [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.747776] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.334s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.748389] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Start building networks asynchronously for instance. 
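
The report-client entries above compare the locally computed inventory against what placement already holds and skip the update when nothing differs ("Inventory has not changed for provider ..."). The guard amounts to a dict comparison over per-resource-class records; a simplified sketch with illustrative names:

    def inventory_changed(known, computed):
        # One record per resource class, e.g. {'VCPU': {'total': 48, ...}}
        return known != computed

    known = {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0}}
    if not inventory_changed(known, dict(known)):
        print("Inventory has not changed; skipping placement update")
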
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 544.800052] env[62346]: DEBUG nova.compute.utils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.801476] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 544.801702] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.819312] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 544.897043] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Start spawning the instance on the hypervisor. 
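
"Allocating IP information in the background" means network allocation runs concurrently with block-device setup and is only joined before the guest is actually spawned. A compressed sketch of that overlap using concurrent.futures (Nova itself uses green threads; all names below are illustrative stand-ins):

    from concurrent.futures import ThreadPoolExecutor

    _executor = ThreadPoolExecutor(max_workers=4)

    def _allocate_network(instance_id):
        # Stand-in for the allocate_for_instance() call logged above.
        return {"instance": instance_id, "ports": []}

    def build_and_spawn(instance_id):
        future = _executor.submit(_allocate_network, instance_id)  # background
        build_block_device_mappings(instance_id)   # overlaps with allocation
        nw_info = future.result()                  # join before spawning
        spawn(instance_id, nw_info)

    def build_block_device_mappings(instance_id):
        pass  # stand-in for the BDM step logged above

    def spawn(instance_id, nw_info):
        pass  # stand-in for the hypervisor spawn step
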
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 544.928968] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.929349] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.929512] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.929985] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.929985] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.929985] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.930247] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.930399] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 544.930569] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.930731] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.930897] env[62346]: DEBUG nova.virt.hardware [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.931882] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92b53d5-9ba0-4adf-9adc-13bce1e60a14 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.941780] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ff8f51-3b04-43e0-b227-c86a35166cef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.114366] env[62346]: DEBUG nova.policy [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0a9f4e536b4471b89cc93244181bcab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf244586f5044940ad18cc8fcc81adbf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 545.313558] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.313974] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.352821] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777
tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 545.438309] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.438583] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.440664] env[62346]: INFO nova.compute.claims [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.608196] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c1417b-f4ea-4a7a-b14d-442f0dc5d3c4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.623288] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba9e499-ea92-4ad2-8886-1cafcdc29f55 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.656018] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bb5eb4-6cc3-4842-a98c-3f54c08f6f12 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.667050] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd513edf-7df5-4d1c-95bd-3a89a9a67307 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.684216] env[62346]: DEBUG nova.compute.provider_tree [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.698837] env[62346]: DEBUG nova.scheduler.client.report [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.715391] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.277s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.715937] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 545.782204] env[62346]: DEBUG nova.compute.utils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.786110] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 545.786110] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 545.820478] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 545.851291] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Successfully created port: c67ffc89-ea76-4d14-ae35-159171dc99a1 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.962672] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Start spawning the instance on the hypervisor. 
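
"Successfully created port" corresponds to a port-create call made against the network API on the instance's behalf. A minimal sketch of the request body such a call might carry; the field names follow the Neutron port resource, but the helper itself and the network UUID are hypothetical:

    def build_port_request(instance_id, network_id):
        return {"port": {"network_id": network_id,
                         "device_id": instance_id,        # ties the port to the instance
                         "device_owner": "compute:nova",
                         "admin_state_up": True}}

    req = build_port_request("b3cb6cbd-a508-4b62-a2b0-14d2963884fb",
                             "11111111-2222-3333-4444-555555555555")  # invented network id
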
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 546.002626] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.002835] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.009633] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.009761] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.010078] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.010164] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.014104] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.014104] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777
tempest-DeleteServersAdminTestJSON-1384217777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.014104] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.014104] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.014104] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.014708] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.014708] env[62346]: DEBUG nova.virt.hardware [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.019267] env[62346]: DEBUG nova.policy [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46480fb172b044649a85cc1a14c04ff8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1dda2d64ca9c4dca96b9ed840a531345', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 546.019363] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab17b2a-7ca5-45d5-b828-d6a1a0458e7f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.028359] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Starting instance... 
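
The hardware lines above walk a fixed pipeline: collect flavor and image limits and preferences (all 0:0:0 here, i.e. unconstrained), enumerate every sockets*cores*threads factorization of the vCPU count under the limits, then sort by preference; with one vCPU the only candidate is 1:1:1, hence "Got 1 possible topologies". A condensed sketch of the enumeration step, simplified relative to nova.virt.hardware:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """All (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            for c in range(1, min(vcpus // s, max_cores) + 1):
                if (vcpus // s) % c:
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    found.append(VirtCPUTopology(s, c, t))
        return found

    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
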
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 546.040691] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729ad747-b688-458f-8717-34f130a52c9b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.135022] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.135022] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.136918] env[62346]: INFO nova.compute.claims [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.188877] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.189148] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.199425] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Starting instance...
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 546.269135] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.334174] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d95253-b8e5-4ca0-9deb-78ae4b0c2470 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.344310] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501206de-a2e2-4b87-bff6-f00a95a6a403 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.378128] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c467fbb9-4d70-48e2-8551-5ea0096508fb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.387301] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2723a674-2ed2-40c4-a79f-2c40a589989b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.402370] env[62346]: DEBUG nova.compute.provider_tree [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.417729] env[62346]: DEBUG nova.scheduler.client.report [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.445790] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.312s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.446339] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 546.449790] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.181s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.451564] env[62346]: INFO nova.compute.claims [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.512317] env[62346]: DEBUG nova.compute.utils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.515697] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 546.515848] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 546.536915] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 546.647395] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Start spawning the instance on the hypervisor. 
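
A claim succeeds when the flavor's demands fit under the capacity derived from the reported inventory, where usable capacity per resource class is (total - reserved) * allocation_ratio. Worked against the numbers logged above (48 vCPUs at ratio 4.0, 196590 MB RAM minus 512 reserved, 200 GB disk), an m1.nano instance fits easily. A sketch of that arithmetic, with invented helper names:

    INVENTORY = {'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
                 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
                 'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0}}

    def claim_fits(flavor, inventory, used):
        demands = {'VCPU': flavor['vcpus'],
                   'MEMORY_MB': flavor['memory_mb'],
                   'DISK_GB': flavor['root_gb'] + flavor['ephemeral_gb']}
        for rc, want in demands.items():
            inv = inventory[rc]
            capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
            if used.get(rc, 0) + want > capacity:
                return False
        return True

    # m1.nano as logged: 1 vCPU, 128 MB, 1 GB root disk, no ephemeral disk
    m1_nano = {'vcpus': 1, 'memory_mb': 128, 'root_gb': 1, 'ephemeral_gb': 0}
    assert claim_fits(m1_nano, INVENTORY, {})   # -> "Claim successful"
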
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 546.674640] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee1cea4-a8db-48f3-a991-ec608dfb79f9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.686329] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.686329] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.686556] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.686759] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.687018] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.687095] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.687328] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.688914] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de 
tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.688914] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.688914] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.688914] env[62346]: DEBUG nova.virt.hardware [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.689663] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d920c0-e7b2-4a71-aff6-894d42c180d8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.694789] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d00f146-ceb4-4b22-beb1-f33525dd49e9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.729837] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fa31b8-0b43-4678-89c5-ac98695a67f0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.741082] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e27bf5-4d5c-4ad1-9c75-1b1c5fe943a9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.746866] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a192fe-dfe3-4382-a9e3-d62155718526 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.774688] env[62346]: DEBUG nova.compute.provider_tree [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.784593] env[62346]: DEBUG nova.scheduler.client.report [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.804192] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.354s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.804792] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 546.860765] env[62346]: DEBUG nova.compute.utils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.863624] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 546.866942] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 546.885912] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 546.978073] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Start spawning the instance on the hypervisor. 
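
"Using /dev/sd instead of None" is the device-naming fallback: when no device name or prefix is supplied for a block device, a default prefix is substituted before the next free name is picked. A toy version of that lookup, not Nova's actual implementation:

    import string

    def next_device_name(taken, prefix=None):
        prefix = prefix or "/dev/sd"     # "Using /dev/sd instead of None"
        for letter in string.ascii_lowercase:
            name = prefix + letter
            if name not in taken:
                return name
        raise ValueError("no free device names under %s" % prefix)

    print(next_device_name({"/dev/sda"}))   # -> /dev/sdb
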
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 547.018047] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.018549] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.018549] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.018695] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.019119] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.019317] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.020692] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.020771] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 547.020924] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.021119] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.021272] env[62346]: DEBUG nova.virt.hardware [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.022643] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc38e08-3cc3-4072-9a8d-4d1a6db3a818 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.031996] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fdaff9-f4b2-4780-b3e3-386944ee3efa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.183575] env[62346]: DEBUG nova.policy [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc64d97014b9438d8c40805d4c6280ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57259b412e4744f28b85130103f6018f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 547.388587] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Successfully created port: 1e8e9233-cd73-4b51-92fd-8ae301bed293 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.491370] env[62346]: DEBUG nova.policy [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79fad858b6b448f68a7f1bb08761e0e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67d7b1e9bec14d2f8be2b7d5f9da1973', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize 
[ 548.752728] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Successfully created port: f3601a65-3237-441a-9c1b-b3e029d395e2 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 549.388820] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "1157187b-7051-4921-bd95-9ef3e2d17104" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 549.389014] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "1157187b-7051-4921-bd95-9ef3e2d17104" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 549.411209] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 549.520840] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 549.521459] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 549.524063] env[62346]: INFO nova.compute.claims [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 549.771980] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742648d8-0ef0-4d35-83be-28babeca011b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.781036] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea51030-f57d-4687-ab28-2e0288ad43ab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.814628] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78ec161-6164-4215-a5fb-94654d902a87 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.826022] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1bb3aa-42fc-4850-9140-f848213c2af1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.838047] env[62346]: DEBUG nova.compute.provider_tree [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 549.848973] env[62346]: DEBUG nova.scheduler.client.report [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 549.864302] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 549.864852] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 549.930059] env[62346]: DEBUG nova.compute.utils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 549.931292] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
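The inventory payload at 549.848973 is worth unpacking: for each resource class, the capacity the scheduler can place against is (total - reserved) * allocation_ratio, while max_unit caps what a single instance may consume. With the numbers above that gives 192 schedulable VCPUs (48 * 4.0), 196078 MB of RAM, and 200 GB of disk of which one instance can take at most 96 GB. A small worked sketch using the dict verbatim from the log:

```python
# Inventory exactly as reported for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1, 'max_unit': 96,    'step_size': 1, 'allocation_ratio': 1.0},
}

def schedulable(inv):
    """Placement-style capacity: (total - reserved) * allocation_ratio."""
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in inventory.items():
    print(f"{rc}: schedulable={schedulable(inv):.0f}, per-instance max_unit={inv['max_unit']}")
# VCPU: schedulable=192, per-instance max_unit=16
# MEMORY_MB: schedulable=196078, per-instance max_unit=65530
# DISK_GB: schedulable=200, per-instance max_unit=96
```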
[ 549.931670] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 549.943705] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 550.035940] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 550.075071] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 550.075407] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 550.075606] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 550.075797] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 550.075944] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 550.076117] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab
tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 550.076332] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 550.076491] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 550.076654] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 550.076864] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 550.076979] env[62346]: DEBUG nova.virt.hardware [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 550.078242] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d382217-3dba-4fb8-a2fd-0cc3a7ca749c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.089063] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5346e83c-f213-4fdc-b592-50d5c717d3b5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.246576] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Successfully created port: a5cb1e9a-2538-4d65-be9b-6b9544f4bbea {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.475198] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Successfully updated port: c67ffc89-ea76-4d14-ae35-159171dc99a1 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 550.506062] env[62346]: DEBUG nova.policy [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Policy check for network:attach_external_network failed with 
credentials {'is_admin': False, 'user_id': '900f3d2eeba94364a78d9453604afc95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d9801d7e83545239af34201cc557278', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 550.511541] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "refresh_cache-b3cb6cbd-a508-4b62-a2b0-14d2963884fb" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.511721] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquired lock "refresh_cache-b3cb6cbd-a508-4b62-a2b0-14d2963884fb" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.512537] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 550.813500] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.508990] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquiring lock "67fbd771-9a97-428e-9453-4e1eba7e141d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.509495] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Lock "67fbd771-9a97-428e-9453-4e1eba7e141d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.526635] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 551.619929] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.619929] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.621959] env[62346]: INFO nova.compute.claims [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.650083] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Successfully created port: ed58db88-7792-4719-8d7e-e73848a30062 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 551.874021] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f624a2eb-608c-43d4-a93f-a0e9e82eb6f2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.887825] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03be86cf-e18c-4724-812d-76d457c3718c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.922554] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef1bf9c-2525-42ed-beb6-39d85613eda3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.932293] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0292c3-17c6-466d-a706-5445e69793c1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.949208] env[62346]: DEBUG nova.compute.provider_tree [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.960734] env[62346]: DEBUG nova.scheduler.client.report [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.983638] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.364s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.984157] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 552.041060] env[62346]: DEBUG nova.compute.utils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.045790] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Not allocating networking since 'none' was specified. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 552.064710] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Start building block device mappings for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 552.141143] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Updating instance_info_cache with network_info: [{"id": "c67ffc89-ea76-4d14-ae35-159171dc99a1", "address": "fa:16:3e:0c:2a:31", "network": {"id": "897cd14e-b875-4ab6-a130-878145e1cc7a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1624571353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4144444a7d254b2ba9fcedd8903bd104", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc67ffc89-ea", "ovs_interfaceid": "c67ffc89-ea76-4d14-ae35-159171dc99a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.155706] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Successfully updated port: 1e8e9233-cd73-4b51-92fd-8ae301bed293 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.160376] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Releasing lock "refresh_cache-b3cb6cbd-a508-4b62-a2b0-14d2963884fb" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.160676] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Instance network_info: |[{"id": "c67ffc89-ea76-4d14-ae35-159171dc99a1", "address": "fa:16:3e:0c:2a:31", "network": {"id": "897cd14e-b875-4ab6-a130-878145e1cc7a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1624571353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4144444a7d254b2ba9fcedd8903bd104", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapc67ffc89-ea", "ovs_interfaceid": "c67ffc89-ea76-4d14-ae35-159171dc99a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 552.162179] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:2a:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4fe416-47a6-4542-b59d-8c71ab4d6503', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c67ffc89-ea76-4d14-ae35-159171dc99a1', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 552.174843] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Creating folder: Project (4144444a7d254b2ba9fcedd8903bd104). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 552.176227] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d6efef6-b0ac-4703-b269-5e7c68be58e0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.183482] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 552.189795] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquiring lock "refresh_cache-ef410c62-2279-4be0-bb88-9fb0735eca19" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.189936] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquired lock "refresh_cache-ef410c62-2279-4be0-bb88-9fb0735eca19" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.190100] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.201047] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Created folder: Project (4144444a7d254b2ba9fcedd8903bd104) in parent group-v953204. 
[ 552.201047] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Creating folder: Instances. Parent ref: group-v953208. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 552.201047] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17be8731-9dd2-4107-bac5-6a97b979beee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.221707] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 552.221956] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 552.222123] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.222313] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 552.222456] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.222602] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 552.222809] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 552.222965] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 552.223141] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 552.223303] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 552.223462] env[62346]: DEBUG nova.virt.hardware [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 552.224398] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7be84e7-3476-42d2-bca4-d4767c81492b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 552.231482] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Created folder: Instances in parent group-v953208.
[ 552.231795] env[62346]: DEBUG oslo.service.loopingcall [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 552.232333] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 552.232606] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e467194-4d19-413f-a492-0fceb6a38a8d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 552.254442] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43ef6ee-4392-4f32-9155-1e86cd3c2462 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 552.261527] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 552.261527] env[62346]: value = "task-4891576"
[ 552.261527] env[62346]: _type = "Task"
[ 552.261527] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
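The "Waiting for the task: (returnval)" block above, together with the "progress is 6%" and "progress is 99%" records that follow for the same tasks, comes from a poll loop over the vCenter task object. A minimal sketch of such a loop, in the spirit of oslo.vmware's wait_for_task but with a stand-in fetch function rather than the real API:

```python
import time

def wait_for_task(task_ref, fetch_task_info, interval=0.5):
    """Poll a task until it succeeds or errors; report progress while waiting."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_ref)
        if info['state'] == 'success':
            info['duration_secs'] = round(time.monotonic() - start, 6)
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"Task: {{'id': {task_ref}, 'name': CreateVM_Task}} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)

# A fake task that completes on the third poll, echoing the 0% -> 99% -> done
# progression the log shows for task-4891576.
states = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 99},
               {'state': 'success'}])
print(wait_for_task('task-4891576', lambda _ref: next(states), interval=0.01))
```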
[ 552.272937] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Instance VIF info [] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 552.278221] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Creating folder: Project (72448b8fe7224f62a5bc90c77852c499). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 552.279084] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49d48909-9a89-43fb-9079-9cc8ec1b99e9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 552.286448] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891576, 'name': CreateVM_Task} progress is 6%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 552.291506] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Created folder: Project (72448b8fe7224f62a5bc90c77852c499) in parent group-v953204.
[ 552.291797] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Creating folder: Instances. Parent ref: group-v953210. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 552.292361] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-817df15f-3b21-41fc-bdf8-98d0b805975a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 552.303870] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Created folder: Instances in parent group-v953210.
[ 552.304182] env[62346]: DEBUG oslo.service.loopingcall [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 552.304346] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 552.304781] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-438cb846-795b-494b-951e-609bf8676872 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 552.325496] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 552.325496] env[62346]: value = "task-4891579"
[ 552.325496] env[62346]: _type = "Task"
[ 552.325496] env[62346]: } to complete.
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.338183] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891579, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.450883] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.778573] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891576, 'name': CreateVM_Task, 'duration_secs': 0.392937} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.778573] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 552.822115] env[62346]: DEBUG nova.compute.manager [req-91dc1b38-c418-4545-bcd4-a32f61426829 req-4cda8d5a-732d-4637-9a91-65d251f7c482 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Received event network-vif-plugged-c67ffc89-ea76-4d14-ae35-159171dc99a1 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 552.822331] env[62346]: DEBUG oslo_concurrency.lockutils [req-91dc1b38-c418-4545-bcd4-a32f61426829 req-4cda8d5a-732d-4637-9a91-65d251f7c482 service nova] Acquiring lock "b3cb6cbd-a508-4b62-a2b0-14d2963884fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.822564] env[62346]: DEBUG oslo_concurrency.lockutils [req-91dc1b38-c418-4545-bcd4-a32f61426829 req-4cda8d5a-732d-4637-9a91-65d251f7c482 service nova] Lock "b3cb6cbd-a508-4b62-a2b0-14d2963884fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.823224] env[62346]: DEBUG oslo_concurrency.lockutils [req-91dc1b38-c418-4545-bcd4-a32f61426829 req-4cda8d5a-732d-4637-9a91-65d251f7c482 service nova] Lock "b3cb6cbd-a508-4b62-a2b0-14d2963884fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.823224] env[62346]: DEBUG nova.compute.manager [req-91dc1b38-c418-4545-bcd4-a32f61426829 req-4cda8d5a-732d-4637-9a91-65d251f7c482 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] No waiting events found dispatching network-vif-plugged-c67ffc89-ea76-4d14-ae35-159171dc99a1 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 552.823224] env[62346]: WARNING nova.compute.manager [req-91dc1b38-c418-4545-bcd4-a32f61426829 req-4cda8d5a-732d-4637-9a91-65d251f7c482 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Received unexpected event network-vif-plugged-c67ffc89-ea76-4d14-ae35-159171dc99a1 for instance with vm_state building and task_state spawning. 
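The event sequence at 552.822 explains the WARNING that closes it: Neutron delivered network-vif-plugged before anything had registered to wait for it, so popping the event under the per-instance "-events" lock found no waiter, and the manager could only log it as unexpected for an instance still in vm_state building. A toy model of that registry, loosely patterned on nova.compute.manager.InstanceEvents and much simplified from the real class:

```python
import threading

class InstanceEvents:
    """Map (instance_uuid, event_name) to a waiter; popping with no waiter
    registered is exactly the 'unexpected event' case from the log."""
    def __init__(self):
        self._lock = threading.Lock()   # stands in for the per-instance "-events" lock
        self._waiters = {}

    def prepare_for_event(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
waiter = events.pop_instance_event('b3cb6cbd-a508-4b62-a2b0-14d2963884fb',
                                   'network-vif-plugged-c67ffc89-ea76-4d14-ae35-159171dc99a1')
if waiter is None:
    # The event beat any waiter, mirroring "No waiting events found dispatching".
    print('WARNING: Received unexpected event for instance still building')
else:
    waiter.set()  # would wake the thread blocked in the spawn path
```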
[ 552.835855] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891579, 'name': CreateVM_Task} progress is 99%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.973556] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.973815] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.974528] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 552.975385] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1f89b34-fe08-4601-8a60-15114a0179ab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.980524] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Waiting for the task: (returnval){ [ 552.980524] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529c3f6b-3241-d68c-dc93-bd16647538dc" [ 552.980524] env[62346]: _type = "Task" [ 552.980524] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.991182] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529c3f6b-3241-d68c-dc93-bd16647538dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.172477] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Successfully created port: 62ed62bd-9cb7-48b8-b86a-8ea30073423e {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.341490] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891579, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.466450] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Successfully updated port: f3601a65-3237-441a-9c1b-b3e029d395e2 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 553.496497] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "refresh_cache-f71cb62c-8dc2-4dcc-9da4-2f26c0960531" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.496646] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquired lock "refresh_cache-f71cb62c-8dc2-4dcc-9da4-2f26c0960531" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.496969] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.508959] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.509923] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 553.509923] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.627610] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Updating instance_info_cache with network_info: [{"id": "1e8e9233-cd73-4b51-92fd-8ae301bed293", "address": "fa:16:3e:99:47:5c", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e8e9233-cd", "ovs_interfaceid": "1e8e9233-cd73-4b51-92fd-8ae301bed293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.646346] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Releasing lock "refresh_cache-ef410c62-2279-4be0-bb88-9fb0735eca19" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.646667] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Instance network_info: |[{"id": "1e8e9233-cd73-4b51-92fd-8ae301bed293", "address": "fa:16:3e:99:47:5c", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e8e9233-cd", "ovs_interfaceid": "1e8e9233-cd73-4b51-92fd-8ae301bed293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 553.647523] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:47:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '821e0800-fabf-48d0-87b4-db5a1eddce93', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e8e9233-cd73-4b51-92fd-8ae301bed293', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 553.659521] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 
tempest-ServersAdminNegativeTestJSON-400623828-project-member] Creating folder: Project (bf244586f5044940ad18cc8fcc81adbf). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 553.662155] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f173804e-f14a-462e-8594-5363c7ad0d47 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.669122] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "9684739f-82ae-4738-8d27-9d273b547ad6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.669397] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "9684739f-82ae-4738-8d27-9d273b547ad6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.681234] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Created folder: Project (bf244586f5044940ad18cc8fcc81adbf) in parent group-v953204. [ 553.681487] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Creating folder: Instances. Parent ref: group-v953214. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 553.681758] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdd24282-b2ea-4022-a5b3-cc4e1200bab2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.687483] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 553.695383] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Created folder: Instances in parent group-v953214. [ 553.695383] env[62346]: DEBUG oslo.service.loopingcall [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.695584] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 553.695853] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1b14036-d556-4e88-8b24-33c62377bd78 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.722970] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 553.722970] env[62346]: value = "task-4891582" [ 553.722970] env[62346]: _type = "Task" [ 553.722970] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.733205] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891582, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.753925] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.766433] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.766564] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.768281] env[62346]: INFO nova.compute.claims [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.841271] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891579, 'name': CreateVM_Task, 'duration_secs': 1.33389} completed successfully. 
[ 553.841696] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 553.845019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 553.845019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 553.845019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 553.845019] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d198f01-af73-4b59-bb94-9fa6c2691e1d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 553.849290] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Waiting for the task: (returnval){
[ 553.849290] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f462c1-1eeb-d4a8-5c69-6162fe88b853"
[ 553.849290] env[62346]: _type = "Task"
[ 553.849290] env[62346]: } to complete.
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.871274] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.871486] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 553.871810] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.872886] env[62346]: DEBUG nova.compute.manager [req-3435e4f8-6643-4830-a129-3f018d292adb req-0ec46430-75c4-45d8-a44c-ae6647a04dc5 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Received event network-vif-plugged-1e8e9233-cd73-4b51-92fd-8ae301bed293 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 553.873678] env[62346]: DEBUG oslo_concurrency.lockutils [req-3435e4f8-6643-4830-a129-3f018d292adb req-0ec46430-75c4-45d8-a44c-ae6647a04dc5 service nova] Acquiring lock "ef410c62-2279-4be0-bb88-9fb0735eca19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.873678] env[62346]: DEBUG oslo_concurrency.lockutils [req-3435e4f8-6643-4830-a129-3f018d292adb req-0ec46430-75c4-45d8-a44c-ae6647a04dc5 service nova] Lock "ef410c62-2279-4be0-bb88-9fb0735eca19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.873907] env[62346]: DEBUG oslo_concurrency.lockutils [req-3435e4f8-6643-4830-a129-3f018d292adb req-0ec46430-75c4-45d8-a44c-ae6647a04dc5 service nova] Lock "ef410c62-2279-4be0-bb88-9fb0735eca19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.873962] env[62346]: DEBUG nova.compute.manager [req-3435e4f8-6643-4830-a129-3f018d292adb req-0ec46430-75c4-45d8-a44c-ae6647a04dc5 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] No waiting events found dispatching network-vif-plugged-1e8e9233-cd73-4b51-92fd-8ae301bed293 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 553.874186] env[62346]: WARNING nova.compute.manager [req-3435e4f8-6643-4830-a129-3f018d292adb req-0ec46430-75c4-45d8-a44c-ae6647a04dc5 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Received unexpected event 
network-vif-plugged-1e8e9233-cd73-4b51-92fd-8ae301bed293 for instance with vm_state building and task_state spawning.
[ 554.043710] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745744d1-fb08-448e-8168-a4ab30d0177c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.057469] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dedb8103-bd4e-4dbe-ae7b-c974877b5652 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.092647] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd74ccc-3f05-464f-b890-d7ceb1845bb6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.101308] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f89604-7071-4891-8030-e0031069e748 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.115825] env[62346]: DEBUG nova.compute.provider_tree [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 554.130584] env[62346]: DEBUG nova.scheduler.client.report [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 554.154424] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 554.154911] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
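The inventory dict reported above fixes what placement will schedule against: usable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. With the logged values that works out to 192 VCPUs, 196078 MB of RAM, and 200 GB of disk. A quick check of the arithmetic:

    # Values copied from the inventory log line above; the capacity formula is
    # the standard placement one, (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0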
[ 554.219213] env[62346]: DEBUG nova.compute.utils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 554.220543] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Not allocating networking since 'none' was specified. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}}
[ 554.230760] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 554.230981] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 554.232479] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 554.232479] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 554.238487] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891582, 'name': CreateVM_Task, 'duration_secs': 0.328123} completed successfully.
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.240037] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 554.240165] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.240235] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.240550] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.241332] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e2d4b84-638f-4f4d-a4e6-c89871f2e9e8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.244144] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 554.257052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.257439] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.257543] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.257607] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.259770] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.259770] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.259770] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.259770] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.259770] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.260088] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 554.262841] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Waiting for the task: (returnval){ [ 554.262841] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52bb9369-0fdd-8c52-42c5-01dfbcdbd8c8" [ 554.262841] env[62346]: _type = "Task" [ 554.262841] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.263335] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.264618] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.268352] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.269108] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.269710] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.269831] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.269983] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 554.270146] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.275645] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52bb9369-0fdd-8c52-42c5-01dfbcdbd8c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.288070] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.288304] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.288479] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.288803] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 554.290114] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d999a9ed-98ec-4ee3-a9e0-c3d6da36cefa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.300754] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138e1013-3121-4cf9-a42f-ea54177ec026 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.322427] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e247020-2c61-48d5-bceb-e7af5e4c4904 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.332060] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9f963a-15b2-40bd-913a-937e5e5160c3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.366944] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180557MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 554.366944] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.367173] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.394314] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 554.441235] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 554.442174] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 554.442174] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 554.442174] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 554.442174] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 554.442174] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 554.442511] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 554.442771] env[62346]: DEBUG nova.virt.hardware [None 
req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 554.442851] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 554.444320] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 554.444320] env[62346]: DEBUG nova.virt.hardware [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 554.444791] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fd57d1-bdb7-465b-a42e-c6db02caa01e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.455066] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cf21a0-c255-4e6a-ba0d-dc5931684cdf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.472917] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance VIF info [] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 554.479915] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Creating folder: Project (b5c07e50d0924dae920d326b8ff7fe33). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 554.480545] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e32540a4-5ef1-4f7d-b428-18ffd0c792e4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.498163] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Created folder: Project (b5c07e50d0924dae920d326b8ff7fe33) in parent group-v953204.
[ 554.498353] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Creating folder: Instances. Parent ref: group-v953217. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
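The nova.virt.hardware lines above are a worked example of Nova's CPU-topology search: the m1.nano flavor has vcpus=1, and the flavor and image set no limits or preferences (the 0:0:0 lines mean unset), so the only (sockets, cores, threads) triple whose product is 1 survives and 1:1:1 wins the sort. A simplified sketch of the enumeration, not Nova's exact code:

    def possible_topologies(vcpus, limit=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals the
        # flavor's vCPU count; 65536 is the default cap seen in the log above.
        for sockets in range(1, min(vcpus, limit) + 1):
            for cores in range(1, min(vcpus // sockets, limit) + 1):
                if vcpus % (sockets * cores) == 0:
                    threads = vcpus // (sockets * cores)
                    if threads <= limit:
                        yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -> the 1:1:1 chosen above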
[ 554.498669] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e430a57c-9291-4e94-8a10-d104758ad958 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.510744] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Created folder: Instances in parent group-v953217.
[ 554.510822] env[62346]: DEBUG oslo.service.loopingcall [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 554.511045] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 554.511251] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90d2cc30-b4f6-4172-b976-6dd4f2051dc6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.527335] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d1dcf710-5f98-478f-ada7-c07bc99b6a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 554.527638] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b3cb6cbd-a508-4b62-a2b0-14d2963884fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 554.527736] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ef410c62-2279-4be0-bb88-9fb0735eca19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 554.527889] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 554.528098] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.528324] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.528324] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.528472] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 67fbd771-9a97-428e-9453-4e1eba7e141d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.528694] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9684739f-82ae-4738-8d27-9d273b547ad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.528848] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 554.529056] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '9', 'num_vm_building': '9', 'num_task_spawning': '9', 'num_os_type_None': '9', 'num_proj_3fc11f11350c4f76a6976205ccd5797f': '1', 'io_workload': '9', 'num_proj_4144444a7d254b2ba9fcedd8903bd104': '1', 'num_proj_bf244586f5044940ad18cc8fcc81adbf': '1', 'num_proj_1dda2d64ca9c4dca96b9ed840a531345': '1', 'num_proj_57259b412e4744f28b85130103f6018f': '1', 'num_proj_67d7b1e9bec14d2f8be2b7d5f9da1973': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_72448b8fe7224f62a5bc90c77852c499': '1', 'num_proj_b5c07e50d0924dae920d326b8ff7fe33': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 554.541798] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 554.541798] env[62346]: value = "task-4891585" [ 554.541798] env[62346]: _type = "Task" [ 554.541798] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.552209] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891585, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.694489] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114c5cf0-018c-4085-8f42-c38f10d2c7b1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.703870] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d9ef79-90eb-491e-ba8b-ce7d6a053768 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.740455] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc27374a-9a2e-408c-8035-fa12c49c8219 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.749845] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d312fd07-5612-4b84-8795-3eb15ad17bbe {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.767619] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 554.773875] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Updating instance_info_cache with network_info: [{"id": "f3601a65-3237-441a-9c1b-b3e029d395e2", "address": "fa:16:3e:49:d6:55", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3601a65-32", "ovs_interfaceid": "f3601a65-3237-441a-9c1b-b3e029d395e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.780427] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.780427] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c 
tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 554.784125] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.784125] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 554.811645] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Releasing lock "refresh_cache-f71cb62c-8dc2-4dcc-9da4-2f26c0960531" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.811645] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Instance network_info: |[{"id": "f3601a65-3237-441a-9c1b-b3e029d395e2", "address": "fa:16:3e:49:d6:55", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3601a65-32", "ovs_interfaceid": "f3601a65-3237-441a-9c1b-b3e029d395e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.812671] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: 
f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:d6:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '821e0800-fabf-48d0-87b4-db5a1eddce93', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3601a65-3237-441a-9c1b-b3e029d395e2', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 554.821325] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Creating folder: Project (1dda2d64ca9c4dca96b9ed840a531345). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 554.821740] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10be1f5f-33b7-459f-8012-c69ba767431a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.825150] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 554.825229] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.458s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 554.839449] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Created folder: Project (1dda2d64ca9c4dca96b9ed840a531345) in parent group-v953204.
[ 554.839449] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Creating folder: Instances. Parent ref: group-v953220. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 554.839449] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58dba728-03be-40ed-bc0b-0a27b776b06c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.854718] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Created folder: Instances in parent group-v953220.
[ 554.854983] env[62346]: DEBUG oslo.service.loopingcall [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
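The "Instance VIF info" record above is the translation of the Neutron port into the VMware driver's terms: the NSX logical-switch UUID from the port's binding details becomes an OpaqueNetwork reference, and the Neutron port ID rides along as iface_id. A minimal sketch of that mapping, with the field names taken from the log and the function name assumed for illustration:

    def vif_info_from_network_info(vif):
        """Map one Neutron VIF dict (as logged above) to the driver's VIF info."""
        return {
            'network_name': vif['network']['bridge'],             # 'br-int'
            'mac_address': vif['address'],                        # 'fa:16:3e:49:d6:55'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                                # Neutron port UUID
            'vif_model': 'vmxnet3',                               # per the image name above
        }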
[ 554.855196] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 554.855414] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25839fdc-5f3d-4fa2-b695-ac6ba926332d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.871957] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Successfully updated port: a5cb1e9a-2538-4d65-be9b-6b9544f4bbea {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 554.879520] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 554.879520] env[62346]: value = "task-4891588"
[ 554.879520] env[62346]: _type = "Task"
[ 554.879520] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 554.886122] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "refresh_cache-6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 554.886333] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquired lock "refresh_cache-6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 554.886385] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 554.891230] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891588, 'name': CreateVM_Task} progress is 0%.
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.029106] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Successfully updated port: ed58db88-7792-4719-8d7e-e73848a30062 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.042409] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "refresh_cache-eac976d1-2988-4106-ac61-59b8c1d9c1a3" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.042409] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquired lock "refresh_cache-eac976d1-2988-4106-ac61-59b8c1d9c1a3" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.042648] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.057196] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891585, 'name': CreateVM_Task, 'duration_secs': 0.296911} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.059191] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.059964] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.060319] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.060575] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 555.061522] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9df8b9-839f-448e-b551-288c7ed25624 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.069138] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Waiting for the task: (returnval){ [ 555.069138] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d2880a-0e03-6fe6-feeb-9645debaca80" [ 555.069138] env[62346]: _type = "Task" [ 555.069138] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.080163] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d2880a-0e03-6fe6-feeb-9645debaca80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.155703] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.300667] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.391646] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891588, 'name': CreateVM_Task, 'duration_secs': 0.508997} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.391831] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.392603] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.584566] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.584854] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.585087] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.585400] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.585614] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 555.585887] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2076fcb2-24b9-49f6-a20e-77e367f7d06b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.599486] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Waiting for the task: (returnval){ [ 
555.599486] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523e7fe9-7110-ff24-bafb-61a37118494b" [ 555.599486] env[62346]: _type = "Task" [ 555.599486] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.613043] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523e7fe9-7110-ff24-bafb-61a37118494b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.109023] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.109023] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 556.109023] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.337705] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Updating instance_info_cache with network_info: [{"id": "a5cb1e9a-2538-4d65-be9b-6b9544f4bbea", "address": "fa:16:3e:0d:f0:fb", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5cb1e9a-25", "ovs_interfaceid": "a5cb1e9a-2538-4d65-be9b-6b9544f4bbea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
556.345648] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Updating instance_info_cache with network_info: [{"id": "ed58db88-7792-4719-8d7e-e73848a30062", "address": "fa:16:3e:1b:80:8a", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped58db88-77", "ovs_interfaceid": "ed58db88-7792-4719-8d7e-e73848a30062", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.359375] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Releasing lock "refresh_cache-6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.359700] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Instance network_info: |[{"id": "a5cb1e9a-2538-4d65-be9b-6b9544f4bbea", "address": "fa:16:3e:0d:f0:fb", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5cb1e9a-25", "ovs_interfaceid": "a5cb1e9a-2538-4d65-be9b-6b9544f4bbea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 556.360665] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de 
tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:f0:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '821e0800-fabf-48d0-87b4-db5a1eddce93', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5cb1e9a-2538-4d65-be9b-6b9544f4bbea', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.368828] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Creating folder: Project (57259b412e4744f28b85130103f6018f). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.373022] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Releasing lock "refresh_cache-eac976d1-2988-4106-ac61-59b8c1d9c1a3" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.373022] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Instance network_info: |[{"id": "ed58db88-7792-4719-8d7e-e73848a30062", "address": "fa:16:3e:1b:80:8a", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped58db88-77", "ovs_interfaceid": "ed58db88-7792-4719-8d7e-e73848a30062", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 556.373394] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98723593-8704-4a64-bac3-953bd5c2830d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.373394] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:80:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '821e0800-fabf-48d0-87b4-db5a1eddce93', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed58db88-7792-4719-8d7e-e73848a30062', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.384668] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Creating folder: Project (67d7b1e9bec14d2f8be2b7d5f9da1973). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.385444] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c51387d-39e8-48c6-a4fd-d62b2bee9256 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.397354] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Created folder: Project (57259b412e4744f28b85130103f6018f) in parent group-v953204. [ 556.397623] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Creating folder: Instances. Parent ref: group-v953223. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.398980] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83103691-b70a-47d7-8bcd-912b9b8ff7a7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.401214] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Created folder: Project (67d7b1e9bec14d2f8be2b7d5f9da1973) in parent group-v953204. [ 556.401393] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Creating folder: Instances. Parent ref: group-v953224. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.401853] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d45e0f1e-8b21-4c83-912f-5940e7b86ba0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.415166] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Created folder: Instances in parent group-v953223. [ 556.415166] env[62346]: DEBUG oslo.service.loopingcall [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.415166] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 556.415166] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Created folder: Instances in parent group-v953224. [ 556.415166] env[62346]: DEBUG oslo.service.loopingcall [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.415813] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7a231e5-0d1d-458c-9a27-9d8508554e4a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.435050] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 556.435681] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ca01fbe-9c38-466b-a93c-e7d6cd4dd80a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.458838] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.458838] env[62346]: value = "task-4891594" [ 556.458838] env[62346]: _type = "Task" [ 556.458838] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.459956] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.459956] env[62346]: value = "task-4891593" [ 556.459956] env[62346]: _type = "Task" [ 556.459956] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.475344] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891594, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.481195] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891593, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.757776] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Successfully updated port: 62ed62bd-9cb7-48b8-b86a-8ea30073423e {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 556.777282] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "refresh_cache-1157187b-7051-4921-bd95-9ef3e2d17104" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.777282] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquired lock "refresh_cache-1157187b-7051-4921-bd95-9ef3e2d17104" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.777282] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.880680] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.979926] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891594, 'name': CreateVM_Task, 'duration_secs': 0.404124} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.980512] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 556.981493] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.981656] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.981974] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.989665] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45312e4d-6197-42e4-9000-60a522ce7526 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.992026] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891593, 'name': CreateVM_Task, 'duration_secs': 0.355085} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.992214] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 556.993340] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.996277] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Waiting for the task: (returnval){ [ 556.996277] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5240bbca-4062-4ec0-4bc2-305d1dd6bda6" [ 556.996277] env[62346]: _type = "Task" [ 556.996277] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.014645] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5240bbca-4062-4ec0-4bc2-305d1dd6bda6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.315056] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Updating instance_info_cache with network_info: [{"id": "62ed62bd-9cb7-48b8-b86a-8ea30073423e", "address": "fa:16:3e:6d:94:5e", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ed62bd-9c", "ovs_interfaceid": "62ed62bd-9cb7-48b8-b86a-8ea30073423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.334973] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Releasing lock "refresh_cache-1157187b-7051-4921-bd95-9ef3e2d17104" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.334973] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Instance network_info: |[{"id": "62ed62bd-9cb7-48b8-b86a-8ea30073423e", "address": "fa:16:3e:6d:94:5e", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", 
"segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ed62bd-9c", "ovs_interfaceid": "62ed62bd-9cb7-48b8-b86a-8ea30073423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 557.335269] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:94:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '821e0800-fabf-48d0-87b4-db5a1eddce93', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62ed62bd-9cb7-48b8-b86a-8ea30073423e', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.345082] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Creating folder: Project (7d9801d7e83545239af34201cc557278). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.345931] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04812ec6-8957-469b-899a-4f3d04058a5d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.359436] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Created folder: Project (7d9801d7e83545239af34201cc557278) in parent group-v953204. [ 557.359436] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Creating folder: Instances. Parent ref: group-v953229. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.359675] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d431cbb4-505b-4da3-b412-11dfc3b46324 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.377349] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Created folder: Instances in parent group-v953229. [ 557.377349] env[62346]: DEBUG oslo.service.loopingcall [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.377349] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 557.377349] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20bab97a-b683-4f62-9d3c-a5c6f76dca79 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.401296] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.401296] env[62346]: value = "task-4891597" [ 557.401296] env[62346]: _type = "Task" [ 557.401296] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.412026] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891597, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.511365] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.511887] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.512224] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.512597] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.512858] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.513656] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2096723d-b3c7-4d1b-bf64-9c854453931c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.524204] env[62346]: DEBUG oslo_vmware.api [None 
req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Waiting for the task: (returnval){ [ 557.524204] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5288c740-e69b-950d-e547-050e2916c57e" [ 557.524204] env[62346]: _type = "Task" [ 557.524204] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.537747] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5288c740-e69b-950d-e547-050e2916c57e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.914725] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891597, 'name': CreateVM_Task, 'duration_secs': 0.418339} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.917464] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 557.917464] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.036221] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.036566] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.037217] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.037514] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.037861] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 
tempest-MigrationsAdminTest-1316875804-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.038175] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fc68a60-eb47-4e8e-986c-3783ab950f42 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.044155] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for the task: (returnval){ [ 558.044155] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520ee2bd-0d48-e18f-1372-ebab7a8c93d1" [ 558.044155] env[62346]: _type = "Task" [ 558.044155] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.057440] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520ee2bd-0d48-e18f-1372-ebab7a8c93d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.209951] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Received event network-changed-c67ffc89-ea76-4d14-ae35-159171dc99a1 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 558.210182] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Refreshing instance network info cache due to event network-changed-c67ffc89-ea76-4d14-ae35-159171dc99a1. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 558.210441] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Acquiring lock "refresh_cache-b3cb6cbd-a508-4b62-a2b0-14d2963884fb" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.210557] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Acquired lock "refresh_cache-b3cb6cbd-a508-4b62-a2b0-14d2963884fb" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.212411] env[62346]: DEBUG nova.network.neutron [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Refreshing network info cache for port c67ffc89-ea76-4d14-ae35-159171dc99a1 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.558513] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.559051] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.559051] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.693376] env[62346]: DEBUG nova.compute.manager [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Received event network-changed-1e8e9233-cd73-4b51-92fd-8ae301bed293 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 558.693586] env[62346]: DEBUG nova.compute.manager [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Refreshing instance network info cache due to event network-changed-1e8e9233-cd73-4b51-92fd-8ae301bed293. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 558.693803] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Acquiring lock "refresh_cache-ef410c62-2279-4be0-bb88-9fb0735eca19" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.693942] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Acquired lock "refresh_cache-ef410c62-2279-4be0-bb88-9fb0735eca19" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.694122] env[62346]: DEBUG nova.network.neutron [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Refreshing network info cache for port 1e8e9233-cd73-4b51-92fd-8ae301bed293 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.808831] env[62346]: DEBUG nova.network.neutron [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Updated VIF entry in instance network info cache for port c67ffc89-ea76-4d14-ae35-159171dc99a1. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 558.809231] env[62346]: DEBUG nova.network.neutron [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Updating instance_info_cache with network_info: [{"id": "c67ffc89-ea76-4d14-ae35-159171dc99a1", "address": "fa:16:3e:0c:2a:31", "network": {"id": "897cd14e-b875-4ab6-a130-878145e1cc7a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1624571353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4144444a7d254b2ba9fcedd8903bd104", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc67ffc89-ea", "ovs_interfaceid": "c67ffc89-ea76-4d14-ae35-159171dc99a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.827028] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Releasing lock "refresh_cache-b3cb6cbd-a508-4b62-a2b0-14d2963884fb" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.827193] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Received event 
network-vif-plugged-f3601a65-3237-441a-9c1b-b3e029d395e2 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 558.827442] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Acquiring lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.827688] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.827892] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.828113] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] No waiting events found dispatching network-vif-plugged-f3601a65-3237-441a-9c1b-b3e029d395e2 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 558.828341] env[62346]: WARNING nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Received unexpected event network-vif-plugged-f3601a65-3237-441a-9c1b-b3e029d395e2 for instance with vm_state building and task_state spawning. [ 558.828563] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Received event network-changed-f3601a65-3237-441a-9c1b-b3e029d395e2 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 558.828763] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Refreshing instance network info cache due to event network-changed-f3601a65-3237-441a-9c1b-b3e029d395e2. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 558.828986] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Acquiring lock "refresh_cache-f71cb62c-8dc2-4dcc-9da4-2f26c0960531" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.829222] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Acquired lock "refresh_cache-f71cb62c-8dc2-4dcc-9da4-2f26c0960531" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.829446] env[62346]: DEBUG nova.network.neutron [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Refreshing network info cache for port f3601a65-3237-441a-9c1b-b3e029d395e2 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 559.560045] env[62346]: DEBUG nova.network.neutron [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Updated VIF entry in instance network info cache for port 1e8e9233-cd73-4b51-92fd-8ae301bed293. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 559.560424] env[62346]: DEBUG nova.network.neutron [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Updating instance_info_cache with network_info: [{"id": "1e8e9233-cd73-4b51-92fd-8ae301bed293", "address": "fa:16:3e:99:47:5c", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e8e9233-cd", "ovs_interfaceid": "1e8e9233-cd73-4b51-92fd-8ae301bed293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.575065] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Releasing lock "refresh_cache-ef410c62-2279-4be0-bb88-9fb0735eca19" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.575065] env[62346]: DEBUG nova.compute.manager [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Received event 
network-vif-plugged-a5cb1e9a-2538-4d65-be9b-6b9544f4bbea {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 559.575065] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Acquiring lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.575863] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.575863] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.575863] env[62346]: DEBUG nova.compute.manager [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] No waiting events found dispatching network-vif-plugged-a5cb1e9a-2538-4d65-be9b-6b9544f4bbea {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 559.576141] env[62346]: WARNING nova.compute.manager [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Received unexpected event network-vif-plugged-a5cb1e9a-2538-4d65-be9b-6b9544f4bbea for instance with vm_state building and task_state spawning. [ 559.576141] env[62346]: DEBUG nova.compute.manager [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Received event network-changed-a5cb1e9a-2538-4d65-be9b-6b9544f4bbea {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 559.576320] env[62346]: DEBUG nova.compute.manager [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Refreshing instance network info cache due to event network-changed-a5cb1e9a-2538-4d65-be9b-6b9544f4bbea. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 559.576470] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Acquiring lock "refresh_cache-6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.576610] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Acquired lock "refresh_cache-6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.576765] env[62346]: DEBUG nova.network.neutron [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Refreshing network info cache for port a5cb1e9a-2538-4d65-be9b-6b9544f4bbea {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 559.728887] env[62346]: DEBUG nova.network.neutron [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Updated VIF entry in instance network info cache for port f3601a65-3237-441a-9c1b-b3e029d395e2. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 559.729442] env[62346]: DEBUG nova.network.neutron [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Updating instance_info_cache with network_info: [{"id": "f3601a65-3237-441a-9c1b-b3e029d395e2", "address": "fa:16:3e:49:d6:55", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3601a65-32", "ovs_interfaceid": "f3601a65-3237-441a-9c1b-b3e029d395e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.743552] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Releasing lock "refresh_cache-f71cb62c-8dc2-4dcc-9da4-2f26c0960531" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.743920] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Received event 
network-vif-plugged-ed58db88-7792-4719-8d7e-e73848a30062 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 559.743985] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Acquiring lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.745253] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.745253] env[62346]: DEBUG oslo_concurrency.lockutils [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] Lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.745253] env[62346]: DEBUG nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] No waiting events found dispatching network-vif-plugged-ed58db88-7792-4719-8d7e-e73848a30062 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 559.745253] env[62346]: WARNING nova.compute.manager [req-b06dadcc-6d00-4d1d-b564-0bdc33ae68ed req-2c025193-3e08-4f41-a6be-47628bd9f385 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Received unexpected event network-vif-plugged-ed58db88-7792-4719-8d7e-e73848a30062 for instance with vm_state building and task_state spawning. [ 560.504842] env[62346]: DEBUG nova.network.neutron [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Updated VIF entry in instance network info cache for port a5cb1e9a-2538-4d65-be9b-6b9544f4bbea. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 560.505512] env[62346]: DEBUG nova.network.neutron [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Updating instance_info_cache with network_info: [{"id": "a5cb1e9a-2538-4d65-be9b-6b9544f4bbea", "address": "fa:16:3e:0d:f0:fb", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5cb1e9a-25", "ovs_interfaceid": "a5cb1e9a-2538-4d65-be9b-6b9544f4bbea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.523375] env[62346]: DEBUG oslo_concurrency.lockutils [req-66e398d9-8a93-4a66-b57c-3a5dc5fa304d req-42c6eaa5-e078-4bdd-80d7-677d85353637 service nova] Releasing lock "refresh_cache-6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.364730] env[62346]: DEBUG nova.compute.manager [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Received event network-changed-ed58db88-7792-4719-8d7e-e73848a30062 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.365032] env[62346]: DEBUG nova.compute.manager [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Refreshing instance network info cache due to event network-changed-ed58db88-7792-4719-8d7e-e73848a30062. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 562.365141] env[62346]: DEBUG oslo_concurrency.lockutils [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] Acquiring lock "refresh_cache-eac976d1-2988-4106-ac61-59b8c1d9c1a3" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.365397] env[62346]: DEBUG oslo_concurrency.lockutils [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] Acquired lock "refresh_cache-eac976d1-2988-4106-ac61-59b8c1d9c1a3" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.365485] env[62346]: DEBUG nova.network.neutron [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Refreshing network info cache for port ed58db88-7792-4719-8d7e-e73848a30062 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.639584] env[62346]: DEBUG nova.compute.manager [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Received event network-vif-plugged-62ed62bd-9cb7-48b8-b86a-8ea30073423e {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.639797] env[62346]: DEBUG oslo_concurrency.lockutils [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] Acquiring lock "1157187b-7051-4921-bd95-9ef3e2d17104-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.640014] env[62346]: DEBUG oslo_concurrency.lockutils [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] Lock "1157187b-7051-4921-bd95-9ef3e2d17104-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.640172] env[62346]: DEBUG oslo_concurrency.lockutils [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] Lock "1157187b-7051-4921-bd95-9ef3e2d17104-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.640398] env[62346]: DEBUG nova.compute.manager [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] No waiting events found dispatching network-vif-plugged-62ed62bd-9cb7-48b8-b86a-8ea30073423e {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 562.640490] env[62346]: WARNING nova.compute.manager [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Received unexpected event network-vif-plugged-62ed62bd-9cb7-48b8-b86a-8ea30073423e for instance with vm_state building and task_state spawning. 
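The pop_instance_event sequence above repeats for every external event Neutron delivers: the compute manager serializes access to a per-instance event table with a "<uuid>-events" lock, pops a matching waiter if one was registered ahead of time, and emits the "Received unexpected event" warning when the event arrives before any waiter exists, as happens here while the instances are still in vm_state building. The following is a minimal sketch of that synchronization pattern; the class shape, the prepare()/pop_instance_event() names, and the lock-name format are illustrative simplifications rather than Nova's exact code, and only the oslo_concurrency.lockutils.lock() usage mirrors what the records above show.

    # Illustrative sketch of the external-event waiter pattern (not the
    # actual nova.compute.manager implementation).
    import threading
    from oslo_concurrency import lockutils

    class InstanceEvents(object):
        def __init__(self):
            # instance uuid -> {event name -> threading.Event waiter}
            self._events = {}

        def prepare(self, instance_uuid, event_name):
            # Register a waiter *before* asking Neutron to plug the VIF.
            with lockutils.lock(instance_uuid + '-events'):
                waiter = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_name] = waiter
                return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            # Serialized per instance, matching the acquire/release pairs
            # logged above for "<uuid>-events".
            with lockutils.lock(instance_uuid + '-events'):
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Nobody called prepare(): this is the WARNING path in the log,
            # typical while the instance is still building.
            print('Received unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        else:
            waiter.set()  # wakes the thread blocked in waiter.wait(timeout)

    # The warning path, using the instance uuid and port from the records above:
    external_instance_event(
        InstanceEvents(), '1157187b-7051-4921-bd95-9ef3e2d17104',
        'network-vif-plugged-62ed62bd-9cb7-48b8-b86a-8ea30073423e')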
[ 562.641525] env[62346]: DEBUG nova.compute.manager [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Received event network-changed-62ed62bd-9cb7-48b8-b86a-8ea30073423e {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.646332] env[62346]: DEBUG nova.compute.manager [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Refreshing instance network info cache due to event network-changed-62ed62bd-9cb7-48b8-b86a-8ea30073423e. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 562.646332] env[62346]: DEBUG oslo_concurrency.lockutils [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] Acquiring lock "refresh_cache-1157187b-7051-4921-bd95-9ef3e2d17104" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.646332] env[62346]: DEBUG oslo_concurrency.lockutils [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] Acquired lock "refresh_cache-1157187b-7051-4921-bd95-9ef3e2d17104" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.646332] env[62346]: DEBUG nova.network.neutron [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Refreshing network info cache for port 62ed62bd-9cb7-48b8-b86a-8ea30073423e {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 563.281506] env[62346]: DEBUG nova.network.neutron [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Updated VIF entry in instance network info cache for port ed58db88-7792-4719-8d7e-e73848a30062. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.281884] env[62346]: DEBUG nova.network.neutron [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Updating instance_info_cache with network_info: [{"id": "ed58db88-7792-4719-8d7e-e73848a30062", "address": "fa:16:3e:1b:80:8a", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped58db88-77", "ovs_interfaceid": "ed58db88-7792-4719-8d7e-e73848a30062", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.298536] env[62346]: DEBUG oslo_concurrency.lockutils [req-152f1648-3791-4947-8a04-912f471ede20 req-b7cd396c-9582-436d-95ad-535d38d4f028 service nova] Releasing lock "refresh_cache-eac976d1-2988-4106-ac61-59b8c1d9c1a3" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.801037] env[62346]: DEBUG nova.network.neutron [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Updated VIF entry in instance network info cache for port 62ed62bd-9cb7-48b8-b86a-8ea30073423e. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.801589] env[62346]: DEBUG nova.network.neutron [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Updating instance_info_cache with network_info: [{"id": "62ed62bd-9cb7-48b8-b86a-8ea30073423e", "address": "fa:16:3e:6d:94:5e", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ed62bd-9c", "ovs_interfaceid": "62ed62bd-9cb7-48b8-b86a-8ea30073423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.818585] env[62346]: DEBUG oslo_concurrency.lockutils [req-f2af39f1-5e6e-41b2-a218-451907886dbf req-b50385d2-921d-4175-83e0-0dc04a4a8f7c service nova] Releasing lock "refresh_cache-1157187b-7051-4921-bd95-9ef3e2d17104" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.403130] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "a040a266-a77e-4ef4-ac34-df4781f2a757" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.404224] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.419580] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 566.527412] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.527751] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.529263] env[62346]: INFO nova.compute.claims [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.806483] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c80d3d-d076-46db-81c8-8b1bdc2a5ae7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.818973] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a087f24-a0cb-4a50-90d5-0cb2028a33a0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.858254] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6060ff5b-90bd-4150-9f46-ff90d118d1df {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.871455] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d2845a-5288-4803-b683-3fd941cb6f22 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.890398] env[62346]: DEBUG nova.compute.provider_tree [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.907271] env[62346]: DEBUG nova.scheduler.client.report [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.928746] 
env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.929135] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 566.993838] env[62346]: DEBUG nova.compute.utils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.000951] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 567.000951] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.014725] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 567.117134] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 567.146583] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 567.146843] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 567.147009] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.147385] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 567.147385] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.147493] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 567.147700] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 567.147905] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 567.148076] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 567.148852] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 567.148852] env[62346]: DEBUG nova.virt.hardware [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 567.150893] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7911f793-51b2-4c1d-896e-5be3a9900291 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.159684] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5ab08c-6c09-49ed-86ca-d145f8ded041 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.259590] env[62346]: DEBUG nova.policy [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06c454ef1bdc4511bde4d2a0319d5e86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20ec99e588e349d6b37d8222d8e5019c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 568.421147] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.423098] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.821601] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 
tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Successfully created port: 29b1d3df-8b6c-4b20-af6a-66a0c3898224 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.020333] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "29eba656-6430-4009-8d24-c5a6f33bef95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.022369] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "29eba656-6430-4009-8d24-c5a6f33bef95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.136436] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Successfully updated port: 29b1d3df-8b6c-4b20-af6a-66a0c3898224 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 571.153617] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "refresh_cache-a040a266-a77e-4ef4-ac34-df4781f2a757" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.153688] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquired lock "refresh_cache-a040a266-a77e-4ef4-ac34-df4781f2a757" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.153927] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 571.283489] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.003542] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.004278] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.295881] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.296174] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.318583] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Updating instance_info_cache with network_info: [{"id": "29b1d3df-8b6c-4b20-af6a-66a0c3898224", "address": "fa:16:3e:12:b4:05", "network": {"id": "60b5f66b-daa7-4003-ac53-7306ae8eb617", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-536326488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20ec99e588e349d6b37d8222d8e5019c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b1d3df-8b", "ovs_interfaceid": "29b1d3df-8b6c-4b20-af6a-66a0c3898224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 572.344279] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Releasing lock "refresh_cache-a040a266-a77e-4ef4-ac34-df4781f2a757" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.344279] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Instance network_info: |[{"id": "29b1d3df-8b6c-4b20-af6a-66a0c3898224", "address": "fa:16:3e:12:b4:05", "network": {"id": "60b5f66b-daa7-4003-ac53-7306ae8eb617", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-536326488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20ec99e588e349d6b37d8222d8e5019c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b1d3df-8b", "ovs_interfaceid": "29b1d3df-8b6c-4b20-af6a-66a0c3898224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 572.344430] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:b4:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29b1d3df-8b6c-4b20-af6a-66a0c3898224', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.352145] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Creating folder: Project (20ec99e588e349d6b37d8222d8e5019c). Parent ref: group-v953204. 
{{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.352794] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3261535-4ccf-4190-8dd8-b41f3cf3eb66 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.370109] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Created folder: Project (20ec99e588e349d6b37d8222d8e5019c) in parent group-v953204. [ 572.370109] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Creating folder: Instances. Parent ref: group-v953232. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.370109] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d30f668c-707a-4cd8-b485-5cfe4f980be7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.379561] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Created folder: Instances in parent group-v953232. [ 572.379703] env[62346]: DEBUG oslo.service.loopingcall [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.380045] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 572.380135] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c15f659e-dcbc-40f9-a75d-753f4f68851b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.404498] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.404498] env[62346]: value = "task-4891600" [ 572.404498] env[62346]: _type = "Task" [ 572.404498] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.414842] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891600, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.916458] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891600, 'name': CreateVM_Task, 'duration_secs': 0.448942} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.916680] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 572.917415] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.917614] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.917967] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.918262] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97e1e037-55da-4741-ac98-fc0ebf5a64fc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.923707] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Waiting for the task: (returnval){ [ 572.923707] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520ac2ee-3aab-2b2a-9eab-31689a5ef320" [ 572.923707] env[62346]: _type = "Task" [ 572.923707] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.935044] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520ac2ee-3aab-2b2a-9eab-31689a5ef320, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.439325] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.439586] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.439797] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.553542] env[62346]: DEBUG nova.compute.manager [req-e716c081-6567-48f4-9ef9-f83e6e0ada93 req-79a31e95-40cf-423a-9d96-1352c8b49c40 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Received event network-vif-plugged-29b1d3df-8b6c-4b20-af6a-66a0c3898224 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 574.553799] env[62346]: DEBUG oslo_concurrency.lockutils [req-e716c081-6567-48f4-9ef9-f83e6e0ada93 req-79a31e95-40cf-423a-9d96-1352c8b49c40 service nova] Acquiring lock "a040a266-a77e-4ef4-ac34-df4781f2a757-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.554430] env[62346]: DEBUG oslo_concurrency.lockutils [req-e716c081-6567-48f4-9ef9-f83e6e0ada93 req-79a31e95-40cf-423a-9d96-1352c8b49c40 service nova] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.554430] env[62346]: DEBUG oslo_concurrency.lockutils [req-e716c081-6567-48f4-9ef9-f83e6e0ada93 req-79a31e95-40cf-423a-9d96-1352c8b49c40 service nova] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.554430] env[62346]: DEBUG nova.compute.manager [req-e716c081-6567-48f4-9ef9-f83e6e0ada93 req-79a31e95-40cf-423a-9d96-1352c8b49c40 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] No waiting events found dispatching network-vif-plugged-29b1d3df-8b6c-4b20-af6a-66a0c3898224 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 574.554430] env[62346]: WARNING nova.compute.manager [req-e716c081-6567-48f4-9ef9-f83e6e0ada93 req-79a31e95-40cf-423a-9d96-1352c8b49c40 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Received unexpected event 
network-vif-plugged-29b1d3df-8b6c-4b20-af6a-66a0c3898224 for instance with vm_state building and task_state spawning. [ 575.050049] env[62346]: DEBUG oslo_concurrency.lockutils [None req-70719889-ef6e-4113-92cb-66bc2c91020a tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] Acquiring lock "3dbb3789-e23a-4810-801e-dbe4f76d97fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.050927] env[62346]: DEBUG oslo_concurrency.lockutils [None req-70719889-ef6e-4113-92cb-66bc2c91020a tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] Lock "3dbb3789-e23a-4810-801e-dbe4f76d97fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.578520] env[62346]: DEBUG oslo_concurrency.lockutils [None req-cb7ed128-cc69-4a7c-b2f8-a2a82fafdc3f tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] Acquiring lock "f0a463df-07ca-48b0-9254-a1a746d3f5b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.579394] env[62346]: DEBUG oslo_concurrency.lockutils [None req-cb7ed128-cc69-4a7c-b2f8-a2a82fafdc3f tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] Lock "f0a463df-07ca-48b0-9254-a1a746d3f5b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.622939] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c856d94d-a185-45bb-8ef5-c69789df8094 tempest-InstanceActionsV221TestJSON-2087366002 tempest-InstanceActionsV221TestJSON-2087366002-project-member] Acquiring lock "ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.623278] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c856d94d-a185-45bb-8ef5-c69789df8094 tempest-InstanceActionsV221TestJSON-2087366002 tempest-InstanceActionsV221TestJSON-2087366002-project-member] Lock "ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.814640] env[62346]: DEBUG nova.compute.manager [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Received event network-changed-29b1d3df-8b6c-4b20-af6a-66a0c3898224 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 578.814724] env[62346]: DEBUG nova.compute.manager [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Refreshing instance network info cache due 
to event network-changed-29b1d3df-8b6c-4b20-af6a-66a0c3898224. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 578.814994] env[62346]: DEBUG oslo_concurrency.lockutils [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] Acquiring lock "refresh_cache-a040a266-a77e-4ef4-ac34-df4781f2a757" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.815306] env[62346]: DEBUG oslo_concurrency.lockutils [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] Acquired lock "refresh_cache-a040a266-a77e-4ef4-ac34-df4781f2a757" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.815306] env[62346]: DEBUG nova.network.neutron [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Refreshing network info cache for port 29b1d3df-8b6c-4b20-af6a-66a0c3898224 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 579.305460] env[62346]: DEBUG oslo_concurrency.lockutils [None req-adb57c27-05e0-4c68-acaf-4ec6054ddb86 tempest-ServersWithSpecificFlavorTestJSON-914987821 tempest-ServersWithSpecificFlavorTestJSON-914987821-project-member] Acquiring lock "84536893-aa88-4ac3-8340-509cb9d88088" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.305745] env[62346]: DEBUG oslo_concurrency.lockutils [None req-adb57c27-05e0-4c68-acaf-4ec6054ddb86 tempest-ServersWithSpecificFlavorTestJSON-914987821 tempest-ServersWithSpecificFlavorTestJSON-914987821-project-member] Lock "84536893-aa88-4ac3-8340-509cb9d88088" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.636133] env[62346]: DEBUG nova.network.neutron [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Updated VIF entry in instance network info cache for port 29b1d3df-8b6c-4b20-af6a-66a0c3898224. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 579.636474] env[62346]: DEBUG nova.network.neutron [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Updating instance_info_cache with network_info: [{"id": "29b1d3df-8b6c-4b20-af6a-66a0c3898224", "address": "fa:16:3e:12:b4:05", "network": {"id": "60b5f66b-daa7-4003-ac53-7306ae8eb617", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-536326488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20ec99e588e349d6b37d8222d8e5019c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b1d3df-8b", "ovs_interfaceid": "29b1d3df-8b6c-4b20-af6a-66a0c3898224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.657894] env[62346]: DEBUG oslo_concurrency.lockutils [req-38fec0bc-1633-4ba4-b3ba-e523426635ae req-9fd64526-fadf-4777-b592-c41ea0920e20 service nova] Releasing lock "refresh_cache-a040a266-a77e-4ef4-ac34-df4781f2a757" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.195801] env[62346]: DEBUG oslo_concurrency.lockutils [None req-0e170052-420f-4eb3-abeb-f6e480838b41 tempest-ServerPasswordTestJSON-555314818 tempest-ServerPasswordTestJSON-555314818-project-member] Acquiring lock "f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.196214] env[62346]: DEBUG oslo_concurrency.lockutils [None req-0e170052-420f-4eb3-abeb-f6e480838b41 tempest-ServerPasswordTestJSON-555314818 tempest-ServerPasswordTestJSON-555314818-project-member] Lock "f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.767870] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f5a5b1a5-b7de-4867-8706-4c52f2e81245 tempest-ServersV294TestFqdnHostnames-229906635 tempest-ServersV294TestFqdnHostnames-229906635-project-member] Acquiring lock "daf47742-054a-496d-b754-3a2687ebe973" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.768117] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f5a5b1a5-b7de-4867-8706-4c52f2e81245 tempest-ServersV294TestFqdnHostnames-229906635 
tempest-ServersV294TestFqdnHostnames-229906635-project-member] Lock "daf47742-054a-496d-b754-3a2687ebe973" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.905110] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "3bca1346-07e6-4514-8ea0-5783b9640849" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.905448] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "3bca1346-07e6-4514-8ea0-5783b9640849" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.516996] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b7f44b87-01b8-4426-a290-0ce6f1918436 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "6ec381e8-762e-4136-863b-2b1a566abb9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.517946] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b7f44b87-01b8-4426-a290-0ce6f1918436 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "6ec381e8-762e-4136-863b-2b1a566abb9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.724632] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f9776e03-d554-4643-9056-c2f9cec39b1e tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "64da10a6-4063-4288-88ab-ae97b8c1fd88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.725316] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f9776e03-d554-4643-9056-c2f9cec39b1e tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "64da10a6-4063-4288-88ab-ae97b8c1fd88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.203771] env[62346]: WARNING oslo_vmware.rw_handles [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 592.203771] 
env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 592.203771] env[62346]: ERROR oslo_vmware.rw_handles [ 592.204348] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 592.206341] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 592.206700] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Copying Virtual Disk [datastore2] vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/e387c426-6834-4191-9a28-bb0e475f5ba4/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 592.207038] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f103895-5287-4d0d-ba10-e7f6df4f7ad6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.219342] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Waiting for the task: (returnval){ [ 592.219342] env[62346]: value = "task-4891610" [ 592.219342] env[62346]: _type = "Task" [ 592.219342] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.231752] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Task: {'id': task-4891610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.736451] env[62346]: DEBUG oslo_vmware.exceptions [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 592.736451] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.742727] env[62346]: ERROR nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 592.742727] env[62346]: Faults: ['InvalidArgument'] [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Traceback (most recent call last): [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] yield resources [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self.driver.spawn(context, instance, image_meta, [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self._fetch_image_if_missing(context, vi) [ 592.742727] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] image_cache(vi, tmp_image_ds_loc) [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] vm_util.copy_virtual_disk( [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] session._wait_for_task(vmdk_copy_task) [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] return self.wait_for_task(task_ref) [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] return evt.wait() [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] result = hub.switch() [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 592.745716] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] return self.greenlet.switch() [ 592.746217] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 592.746217] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self.f(*self.args, **self.kw) [ 592.746217] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 592.746217] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] raise exceptions.translate_fault(task_info.error) [ 592.746217] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 592.746217] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Faults: ['InvalidArgument'] [ 592.746217] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] [ 592.746217] env[62346]: INFO nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Terminating instance [ 592.746217] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" 
{{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.749010] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.749010] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquiring lock "refresh_cache-d1dcf710-5f98-478f-ada7-c07bc99b6a2e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.749010] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquired lock "refresh_cache-d1dcf710-5f98-478f-ada7-c07bc99b6a2e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.749010] env[62346]: DEBUG nova.network.neutron [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.751261] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5f0e983-a558-4296-adfa-01016350fc52 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.767114] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.767114] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 592.767303] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a63b744-c6de-41ea-839d-c6393425f5b7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.773792] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Waiting for the task: (returnval){ [ 592.773792] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52155ec9-1fff-0696-7a64-4016219d9672" [ 592.773792] env[62346]: _type = "Task" [ 592.773792] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.786302] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52155ec9-1fff-0696-7a64-4016219d9672, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.826102] env[62346]: DEBUG nova.network.neutron [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.039868] env[62346]: DEBUG nova.network.neutron [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.053060] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Releasing lock "refresh_cache-d1dcf710-5f98-478f-ada7-c07bc99b6a2e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.053060] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 593.053060] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 593.053507] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ad0cc9-fc7d-4d4a-a138-76c459f40b4e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.068831] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 593.068831] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-deb0fc9e-9d27-45ae-93b5-22f1fa6f249a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.098019] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 593.098019] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 593.098019] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Deleting the datastore file [datastore2] d1dcf710-5f98-478f-ada7-c07bc99b6a2e {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 593.098019] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb807ba0-9b17-47ca-a28a-238f21ff77da {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.107490] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Waiting for the task: (returnval){ [ 593.107490] env[62346]: value = "task-4891612" [ 593.107490] env[62346]: _type = "Task" [ 593.107490] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.122469] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Task: {'id': task-4891612, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.285615] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 593.286449] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Creating directory with path [datastore2] vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 593.286710] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3aab5ab-9f1d-4618-8eb7-b7306b5e5a9a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.302962] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Created directory with path [datastore2] vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 593.303413] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Fetch image to [datastore2] vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 593.303729] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 593.304679] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0ce2fa-9e28-4292-b8a1-b3de1231c499 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.315555] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865cf482-2620-4235-856c-d126c8ec7623 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.327564] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd23a26-15fb-4943-b81a-6b1143267e92 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.369925] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d8e28d-5a75-4f48-9435-23296a91c997 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.378629] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-24e4fa96-0443-4b64-b7fa-3943dc812e3a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.403729] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 593.525794] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 593.594316] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 593.594664] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 593.618894] env[62346]: DEBUG oslo_vmware.api [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Task: {'id': task-4891612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034555} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.619284] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 593.619729] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 593.620113] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 593.620527] env[62346]: INFO nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Took 0.57 seconds to destroy the instance on the hypervisor. [ 593.620938] env[62346]: DEBUG oslo.service.loopingcall [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.621078] env[62346]: DEBUG nova.compute.manager [-] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 593.626300] env[62346]: DEBUG nova.compute.claims [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 593.626300] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.626300] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.819998] env[62346]: DEBUG oslo_concurrency.lockutils [None req-efa89789-c7d6-460d-bd4d-00fd225dbf4d tempest-FloatingIPsAssociationTestJSON-675611170 tempest-FloatingIPsAssociationTestJSON-675611170-project-member] Acquiring lock "35d268f6-0573-4f9b-85ac-09359c56ef8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.819998] env[62346]: DEBUG oslo_concurrency.lockutils [None req-efa89789-c7d6-460d-bd4d-00fd225dbf4d tempest-FloatingIPsAssociationTestJSON-675611170 tempest-FloatingIPsAssociationTestJSON-675611170-project-member] Lock "35d268f6-0573-4f9b-85ac-09359c56ef8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.151955] env[62346]: DEBUG oslo_concurrency.lockutils [None req-124b6992-e84d-4e25-ae7d-dc5cc6bb9eb8 tempest-AttachInterfacesV270Test-1166619584 tempest-AttachInterfacesV270Test-1166619584-project-member] Acquiring lock "c48b9d0d-37c3-47bb-9f9a-4055eb607c93" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.152218] env[62346]: DEBUG oslo_concurrency.lockutils [None req-124b6992-e84d-4e25-ae7d-dc5cc6bb9eb8 tempest-AttachInterfacesV270Test-1166619584 tempest-AttachInterfacesV270Test-1166619584-project-member] Lock "c48b9d0d-37c3-47bb-9f9a-4055eb607c93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.212234] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447ec45c-9a9f-4941-9d94-c78e9f0a2a47 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.226551] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ff8b40b5-fd03-46a4-b735-d119fad369c1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.259578] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20401cdf-80a2-45ef-9aed-edc078416868 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.266661] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f177e7-b217-49f6-8828-68044d5fab9c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.288425] env[62346]: DEBUG nova.compute.provider_tree [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.303429] env[62346]: DEBUG nova.scheduler.client.report [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 594.324112] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.699s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.324678] env[62346]: ERROR nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 594.324678] env[62346]: Faults: ['InvalidArgument'] [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Traceback (most recent call last): [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self.driver.spawn(context, instance, image_meta, [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: 
d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self._fetch_image_if_missing(context, vi) [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] image_cache(vi, tmp_image_ds_loc) [ 594.324678] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] vm_util.copy_virtual_disk( [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] session._wait_for_task(vmdk_copy_task) [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] return self.wait_for_task(task_ref) [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] return evt.wait() [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] result = hub.switch() [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] return self.greenlet.switch() [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 594.325604] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] self.f(*self.args, **self.kw) [ 594.326640] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 594.326640] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] raise exceptions.translate_fault(task_info.error) [ 594.326640] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 594.326640] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Faults: ['InvalidArgument'] [ 
594.326640] env[62346]: ERROR nova.compute.manager [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] [ 594.326640] env[62346]: DEBUG nova.compute.utils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 594.331605] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Build of instance d1dcf710-5f98-478f-ada7-c07bc99b6a2e was re-scheduled: A specified parameter was not correct: fileType [ 594.331605] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 594.332175] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 594.332368] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquiring lock "refresh_cache-d1dcf710-5f98-478f-ada7-c07bc99b6a2e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.333035] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Acquired lock "refresh_cache-d1dcf710-5f98-478f-ada7-c07bc99b6a2e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.333035] env[62346]: DEBUG nova.network.neutron [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 594.420195] env[62346]: DEBUG nova.network.neutron [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.574946] env[62346]: DEBUG nova.network.neutron [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.586641] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Releasing lock "refresh_cache-d1dcf710-5f98-478f-ada7-c07bc99b6a2e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.586971] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 594.587219] env[62346]: DEBUG nova.compute.manager [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] [instance: d1dcf710-5f98-478f-ada7-c07bc99b6a2e] Skipping network deallocation for instance since networking was not requested. {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 594.740536] env[62346]: INFO nova.scheduler.client.report [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Deleted allocations for instance d1dcf710-5f98-478f-ada7-c07bc99b6a2e [ 594.768546] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d6d293b-03a0-4637-a5e2-b76da913f735 tempest-ServersAdmin275Test-869648772 tempest-ServersAdmin275Test-869648772-project-member] Lock "d1dcf710-5f98-478f-ada7-c07bc99b6a2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 54.259s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.795951] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 594.869545] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.869785] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.871574] env[62346]: INFO nova.compute.claims [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.381164] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e418fca-5641-4faf-a265-6735c3da97ac {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.390455] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9497410-bf73-4655-8503-fad6c9398b16 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.427144] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f19b63-0499-4c55-8b05-0f3c935dee35 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.437876] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f927e8-4d27-49aa-95a8-624a6c4d521c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.455955] env[62346]: DEBUG nova.compute.provider_tree [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.470428] env[62346]: DEBUG nova.scheduler.client.report [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
595.491848] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.622s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.492373] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 595.554672] env[62346]: DEBUG nova.compute.utils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 595.556479] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 595.560025] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 595.570511] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 595.667019] env[62346]: DEBUG nova.policy [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'abe531327c22473989d0ca0c3e16d419', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e556bdf0bd9c4ea9a15e33b2ecde4f11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 595.669485] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 595.705272] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 595.705560] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 595.705719] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.706089] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 595.706183] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.706309] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 595.706526] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 595.706699] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 595.706867] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 595.707055] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 595.707223] env[62346]: DEBUG nova.virt.hardware [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.708514] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a791650c-ef24-45cc-931d-f334279fc93b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.719415] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929f1ad6-61b1-44ba-8213-ee0906792ce2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.689457] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Successfully created port: dff92cc4-6450-4d31-b4ab-4ceebc23b299 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.597060] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Successfully updated port: dff92cc4-6450-4d31-b4ab-4ceebc23b299 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 598.613538] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "refresh_cache-7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.613747] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquired lock "refresh_cache-7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.613904] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] 
Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.714090] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.068344] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Updating instance_info_cache with network_info: [{"id": "dff92cc4-6450-4d31-b4ab-4ceebc23b299", "address": "fa:16:3e:3c:a9:d8", "network": {"id": "ae91b80b-8149-4908-ad2d-7177a17cfccb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2058547191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e556bdf0bd9c4ea9a15e33b2ecde4f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "980cb890-345b-4bf8-990a-a2faec78e49c", "external-id": "nsx-vlan-transportzone-965", "segmentation_id": 965, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdff92cc4-64", "ovs_interfaceid": "dff92cc4-6450-4d31-b4ab-4ceebc23b299", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.085369] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Releasing lock "refresh_cache-7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.085369] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Instance network_info: |[{"id": "dff92cc4-6450-4d31-b4ab-4ceebc23b299", "address": "fa:16:3e:3c:a9:d8", "network": {"id": "ae91b80b-8149-4908-ad2d-7177a17cfccb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2058547191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e556bdf0bd9c4ea9a15e33b2ecde4f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "980cb890-345b-4bf8-990a-a2faec78e49c", "external-id": "nsx-vlan-transportzone-965", "segmentation_id": 965, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdff92cc4-64", "ovs_interfaceid": "dff92cc4-6450-4d31-b4ab-4ceebc23b299", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 599.085712] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:a9:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '980cb890-345b-4bf8-990a-a2faec78e49c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dff92cc4-6450-4d31-b4ab-4ceebc23b299', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.096483] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Creating folder: Project (e556bdf0bd9c4ea9a15e33b2ecde4f11). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.097178] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-947fb240-9531-43d2-82ba-33589566749c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.108743] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Created folder: Project (e556bdf0bd9c4ea9a15e33b2ecde4f11) in parent group-v953204. [ 599.108941] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Creating folder: Instances. Parent ref: group-v953239. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.109199] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8a50fc1-19bb-4300-9df0-e673da6a5f9f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.120524] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Created folder: Instances in parent group-v953239. [ 599.120738] env[62346]: DEBUG oslo.service.loopingcall [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.120921] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 599.121134] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64184698-4c7f-443e-b0c4-5da4269409e8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.142098] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.142098] env[62346]: value = "task-4891617" [ 599.142098] env[62346]: _type = "Task" [ 599.142098] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.153985] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891617, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.470686] env[62346]: DEBUG nova.compute.manager [req-1ce68daf-2ca9-49ed-871e-df3d2b45a5f0 req-74a2e4c2-d823-4498-b905-b9a75bee1ada service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Received event network-vif-plugged-dff92cc4-6450-4d31-b4ab-4ceebc23b299 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 599.470958] env[62346]: DEBUG oslo_concurrency.lockutils [req-1ce68daf-2ca9-49ed-871e-df3d2b45a5f0 req-74a2e4c2-d823-4498-b905-b9a75bee1ada service nova] Acquiring lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.471651] env[62346]: DEBUG oslo_concurrency.lockutils [req-1ce68daf-2ca9-49ed-871e-df3d2b45a5f0 req-74a2e4c2-d823-4498-b905-b9a75bee1ada service nova] Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.471899] env[62346]: DEBUG oslo_concurrency.lockutils [req-1ce68daf-2ca9-49ed-871e-df3d2b45a5f0 req-74a2e4c2-d823-4498-b905-b9a75bee1ada service nova] Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.472100] env[62346]: DEBUG nova.compute.manager [req-1ce68daf-2ca9-49ed-871e-df3d2b45a5f0 req-74a2e4c2-d823-4498-b905-b9a75bee1ada service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] No waiting events found dispatching network-vif-plugged-dff92cc4-6450-4d31-b4ab-4ceebc23b299 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 599.472323] env[62346]: WARNING nova.compute.manager [req-1ce68daf-2ca9-49ed-871e-df3d2b45a5f0 req-74a2e4c2-d823-4498-b905-b9a75bee1ada service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Received unexpected event network-vif-plugged-dff92cc4-6450-4d31-b4ab-4ceebc23b299 for instance with vm_state building and task_state spawning. 
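The CreateVM_Task records above show the oslo.vmware task protocol that recurs throughout this log: a *_Task method is invoked against vCenter, a task handle comes back, and wait_for_task polls the task's info until it reaches a terminal state, logging "progress is N%." on each pass. A minimal sketch of that polling loop, where get_task_info is a hypothetical stand-in for the PropertyCollector read the library performs (an illustration of the pattern, not oslo_vmware's actual implementation):

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # Poll a vSphere task handle (e.g. "task-4891617") until it finishes.
        # get_task_info(task_ref) stands in for reading the task's `info`
        # property via the PropertyCollector; the real loop runs under an
        # oslo.service looping call rather than a bare sleep.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # oslo.vmware translates a task error into a VimFaultException,
                # as seen later in this log for CopyVirtualDisk_Task.
                raise RuntimeError(info.error)
            time.sleep(poll_interval)

The same span also shows that the "Received unexpected event network-vif-plugged-..." warning records a race rather than a failure: Neutron delivered the vif-plugged event before the compute manager had registered a waiter for it, so pop_instance_event found nothing pending and the event was logged as unexpected while the instance was still building.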
[ 599.652851] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891617, 'name': CreateVM_Task, 'duration_secs': 0.354124} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.653285] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 599.654099] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.654269] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.654616] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 599.654989] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9242b764-e744-4bc2-a90b-53d0cb1d6fe5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.664838] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Waiting for the task: (returnval){ [ 599.664838] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ec30d9-347b-1070-102c-7762f9cd3c75" [ 599.664838] env[62346]: _type = "Task" [ 599.664838] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.680469] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ec30d9-347b-1070-102c-7762f9cd3c75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.182124] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.182578] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.182938] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.214595] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3947c1a3-1ea7-4423-b658-41f93041f8e6 tempest-AttachInterfacesUnderV243Test-819989224 tempest-AttachInterfacesUnderV243Test-819989224-project-member] Acquiring lock "2aa479bb-71e8-4d21-9af8-d2af9d284c14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.214958] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3947c1a3-1ea7-4423-b658-41f93041f8e6 tempest-AttachInterfacesUnderV243Test-819989224 tempest-AttachInterfacesUnderV243Test-819989224-project-member] Lock "2aa479bb-71e8-4d21-9af8-d2af9d284c14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.810402] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b7028a10-40f8-482d-86df-f73db6033fe7 tempest-ServersTestBootFromVolume-936315030 tempest-ServersTestBootFromVolume-936315030-project-member] Acquiring lock "cc78290c-3615-4e59-b0f2-95d967b07569" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.812017] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b7028a10-40f8-482d-86df-f73db6033fe7 tempest-ServersTestBootFromVolume-936315030 tempest-ServersTestBootFromVolume-936315030-project-member] Lock "cc78290c-3615-4e59-b0f2-95d967b07569" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.410244] env[62346]: DEBUG nova.compute.manager [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Received event 
network-changed-dff92cc4-6450-4d31-b4ab-4ceebc23b299 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 603.410900] env[62346]: DEBUG nova.compute.manager [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Refreshing instance network info cache due to event network-changed-dff92cc4-6450-4d31-b4ab-4ceebc23b299. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 603.410900] env[62346]: DEBUG oslo_concurrency.lockutils [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] Acquiring lock "refresh_cache-7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.411808] env[62346]: DEBUG oslo_concurrency.lockutils [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] Acquired lock "refresh_cache-7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.414474] env[62346]: DEBUG nova.network.neutron [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Refreshing network info cache for port dff92cc4-6450-4d31-b4ab-4ceebc23b299 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 604.531132] env[62346]: DEBUG nova.network.neutron [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Updated VIF entry in instance network info cache for port dff92cc4-6450-4d31-b4ab-4ceebc23b299. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 604.531518] env[62346]: DEBUG nova.network.neutron [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Updating instance_info_cache with network_info: [{"id": "dff92cc4-6450-4d31-b4ab-4ceebc23b299", "address": "fa:16:3e:3c:a9:d8", "network": {"id": "ae91b80b-8149-4908-ad2d-7177a17cfccb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2058547191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e556bdf0bd9c4ea9a15e33b2ecde4f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "980cb890-345b-4bf8-990a-a2faec78e49c", "external-id": "nsx-vlan-transportzone-965", "segmentation_id": 965, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdff92cc4-64", "ovs_interfaceid": "dff92cc4-6450-4d31-b4ab-4ceebc23b299", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.546576] env[62346]: DEBUG oslo_concurrency.lockutils [req-f9f43839-3596-4400-9d87-2e33d70af8cc req-65e83164-ef04-46f1-b2b9-8faec46bdf2c service nova] Releasing lock "refresh_cache-7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.843213] env[62346]: DEBUG oslo_concurrency.lockutils [None req-85a434c8-5c4f-41b1-9cf5-16e84341eebe tempest-ServersTestManualDisk-608428885 tempest-ServersTestManualDisk-608428885-project-member] Acquiring lock "d87291e9-b698-4e6c-a265-55c00b863ac1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.843611] env[62346]: DEBUG oslo_concurrency.lockutils [None req-85a434c8-5c4f-41b1-9cf5-16e84341eebe tempest-ServersTestManualDisk-608428885 tempest-ServersTestManualDisk-608428885-project-member] Lock "d87291e9-b698-4e6c-a265-55c00b863ac1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.888605] env[62346]: DEBUG oslo_concurrency.lockutils [None req-da74f336-8254-4aaa-a28a-803daf3e111d tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "682b49a4-4901-4c62-9e5a-cf03047e1cb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.888865] env[62346]: DEBUG oslo_concurrency.lockutils [None req-da74f336-8254-4aaa-a28a-803daf3e111d tempest-ServerDiskConfigTestJSON-602794259 
tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "682b49a4-4901-4c62-9e5a-cf03047e1cb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.633282] env[62346]: DEBUG oslo_concurrency.lockutils [None req-03b9db48-34b0-49fe-9cdf-c178e57c096a tempest-ServerTagsTestJSON-173515555 tempest-ServerTagsTestJSON-173515555-project-member] Acquiring lock "865c8799-4a94-41af-921c-c4206331ba81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.633631] env[62346]: DEBUG oslo_concurrency.lockutils [None req-03b9db48-34b0-49fe-9cdf-c178e57c096a tempest-ServerTagsTestJSON-173515555 tempest-ServerTagsTestJSON-173515555-project-member] Lock "865c8799-4a94-41af-921c-c4206331ba81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.308535] env[62346]: DEBUG oslo_concurrency.lockutils [None req-50e9d95b-322e-4559-9767-a3101fa00193 tempest-ServersNegativeTestJSON-1450782741 tempest-ServersNegativeTestJSON-1450782741-project-member] Acquiring lock "f0d00a15-7869-4b1a-bf6f-a7f5e1c88138" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.308949] env[62346]: DEBUG oslo_concurrency.lockutils [None req-50e9d95b-322e-4559-9767-a3101fa00193 tempest-ServersNegativeTestJSON-1450782741 tempest-ServersNegativeTestJSON-1450782741-project-member] Lock "f0d00a15-7869-4b1a-bf6f-a7f5e1c88138" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.282958] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3ca091da-3cc6-4226-b25e-19f78f96bf8b tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] Acquiring lock "e2bfd80f-9e03-4dd1-bd28-fb33b54463b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.283397] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3ca091da-3cc6-4226-b25e-19f78f96bf8b tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] Lock "e2bfd80f-9e03-4dd1-bd28-fb33b54463b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.824643] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c741b35-9ddc-4760-8ba8-14e510d6ce82 tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] Acquiring lock "c6a1f6a2-05be-43d5-a2c3-36bc5a878434" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.825290] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c741b35-9ddc-4760-8ba8-14e510d6ce82 tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] Lock "c6a1f6a2-05be-43d5-a2c3-36bc5a878434" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.811268] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.838783] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.838895] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 614.839125] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 614.862438] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.862605] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.862739] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.862865] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.863103] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.863423] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.863552] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.863761] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.863883] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.864054] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 614.864227] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 614.864768] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.865008] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.879542] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.881026] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.881026] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.881026] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 614.881386] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c75bd4-858c-4a52-808c-76f13823b7d3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.892268] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971f16aa-5512-48e6-b737-0db3d71e891f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.907503] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f556d3f8-9392-4f67-b79e-16b337ff8aa5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.914772] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5431aca7-bc4d-4615-86f0-a69144cf166a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.945856] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180540MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 614.946089] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.946278] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.026614] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b3cb6cbd-a508-4b62-a2b0-14d2963884fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.026614] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ef410c62-2279-4be0-bb88-9fb0735eca19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.026614] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.026614] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.026884] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.026884] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.026884] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 67fbd771-9a97-428e-9453-4e1eba7e141d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.026884] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9684739f-82ae-4738-8d27-9d273b547ad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.027046] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.027046] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.071779] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.099032] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.113516] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.125611] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3dbb3789-e23a-4810-801e-dbe4f76d97fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.139136] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f0a463df-07ca-48b0-9254-a1a746d3f5b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.152722] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.163083] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 84536893-aa88-4ac3-8340-509cb9d88088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.177263] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.190191] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daf47742-054a-496d-b754-3a2687ebe973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.201116] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.213445] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6ec381e8-762e-4136-863b-2b1a566abb9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.226779] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 64da10a6-4063-4288-88ab-ae97b8c1fd88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.238905] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 35d268f6-0573-4f9b-85ac-09359c56ef8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.251918] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c48b9d0d-37c3-47bb-9f9a-4055eb607c93 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.264638] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2aa479bb-71e8-4d21-9af8-d2af9d284c14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.275243] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance cc78290c-3615-4e59-b0f2-95d967b07569 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.292436] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d87291e9-b698-4e6c-a265-55c00b863ac1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.307019] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 682b49a4-4901-4c62-9e5a-cf03047e1cb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.316770] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 865c8799-4a94-41af-921c-c4206331ba81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.328577] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f0d00a15-7869-4b1a-bf6f-a7f5e1c88138 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.340922] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e2bfd80f-9e03-4dd1-bd28-fb33b54463b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.357022] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6a1f6a2-05be-43d5-a2c3-36bc5a878434 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.357022] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 615.357022] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '1', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_4144444a7d254b2ba9fcedd8903bd104': '1', 'io_workload': '10', 'num_proj_bf244586f5044940ad18cc8fcc81adbf': '1', 'num_proj_1dda2d64ca9c4dca96b9ed840a531345': '1', 'num_proj_57259b412e4744f28b85130103f6018f': '1', 'num_proj_67d7b1e9bec14d2f8be2b7d5f9da1973': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_72448b8fe7224f62a5bc90c77852c499': '1', 'num_proj_b5c07e50d0924dae920d326b8ff7fe33': '1', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 615.870134] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b944fbe-b7a9-4174-8aa3-3591f59d1419 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.878693] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e978599f-5e47-465c-a065-a7b725bb9ea7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.917390] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfc0e9c-cbec-4613-8f3b-c2cf91af7915 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.925892] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15d5c90-3916-406f-a916-c0e77eda3fa6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.940951] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.951079] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 615.969276] env[62346]: DEBUG nova.compute.resource_tracker [None 
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 615.969419] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.023s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.324793] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.325091] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.325259] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.325416] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.325570] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.325850] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.325850] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 620.012249] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a97b669d-c0f3-4914-8f89-a0b217393d0f tempest-InstanceActionsNegativeTestJSON-1402465515 tempest-InstanceActionsNegativeTestJSON-1402465515-project-member] Acquiring lock "55ab9511-9e85-426e-b6bc-829f88c534f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.012557] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a97b669d-c0f3-4914-8f89-a0b217393d0f tempest-InstanceActionsNegativeTestJSON-1402465515 tempest-InstanceActionsNegativeTestJSON-1402465515-project-member] Lock "55ab9511-9e85-426e-b6bc-829f88c534f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.793679] env[62346]: WARNING oslo_vmware.rw_handles [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 640.793679] env[62346]: ERROR oslo_vmware.rw_handles [ 640.794324] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 640.796702] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 640.796702] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 
tempest-ImagesTestJSON-315076728-project-member] Copying Virtual Disk [datastore2] vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/18810459-9f16-4f71-817c-11b9cb1bbfae/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 640.796702] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3961be8-62e2-487a-9e63-8562ae9661b5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.805254] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Waiting for the task: (returnval){ [ 640.805254] env[62346]: value = "task-4891618" [ 640.805254] env[62346]: _type = "Task" [ 640.805254] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.813534] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Task: {'id': task-4891618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.315981] env[62346]: DEBUG oslo_vmware.exceptions [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 641.316247] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.316911] env[62346]: ERROR nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 641.316911] env[62346]: Faults: ['InvalidArgument'] [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Traceback (most recent call last): [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] yield resources [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] self.driver.spawn(context, instance, image_meta, [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] self._fetch_image_if_missing(context, vi) [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 641.316911] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] image_cache(vi, tmp_image_ds_loc) [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] vm_util.copy_virtual_disk( [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] session._wait_for_task(vmdk_copy_task) [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] return self.wait_for_task(task_ref) [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] return evt.wait() [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] result = hub.switch() [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] return self.greenlet.switch() [ 641.317392] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 641.317944] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] self.f(*self.args, **self.kw) [ 641.317944] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 641.317944] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] raise exceptions.translate_fault(task_info.error) [ 641.317944] 
env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 641.317944] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Faults: ['InvalidArgument'] [ 641.317944] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] [ 641.317944] env[62346]: INFO nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Terminating instance [ 641.318805] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.319021] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.319258] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fff991c5-ba7c-4ff8-a74e-96e9d1b1d13f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.322449] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 641.322686] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 641.323513] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34eb1c12-ec24-4f33-9596-5d04875d655f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.327925] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.328111] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 641.330596] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13a20742-8d2c-4abf-90f2-6ae50f6d48fa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.332818] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 641.333261] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16cda06c-8aa0-4540-82f8-8f19487cb651 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.337650] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Waiting for the task: (returnval){ [ 641.337650] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]526ef01d-cc6c-ff1f-25fe-4e8c025cce26" [ 641.337650] env[62346]: _type = "Task" [ 641.337650] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.345461] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]526ef01d-cc6c-ff1f-25fe-4e8c025cce26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.405557] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 641.405830] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 641.406023] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Deleting the datastore file [datastore2] b3cb6cbd-a508-4b62-a2b0-14d2963884fb {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 641.406305] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72ae84c5-87f5-4ef8-80e1-445a4ca3f8b0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.412537] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Waiting for the task: (returnval){ [ 641.412537] env[62346]: value = "task-4891620" [ 641.412537] env[62346]: _type = "Task" [ 641.412537] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.420465] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Task: {'id': task-4891620, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.849044] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 641.849350] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Creating directory with path [datastore2] vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.849549] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f542da53-f3df-4dbe-bb17-43ab9f96d630 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.862424] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Created directory with path [datastore2] vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.862646] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Fetch image to [datastore2] vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 641.862805] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 641.863602] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc9a47a-0003-42af-be1c-af0b7f3bb53d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.871254] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d20ca2-ad26-49c4-bbc1-692742b1e5d8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.880898] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a2f028-60f2-404b-92e4-032699a5e355 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.916859] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411687dc-db90-48ca-9aa6-f19b93080231 {{(pid=62346) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.926222] env[62346]: DEBUG oslo_vmware.api [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Task: {'id': task-4891620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078161} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.926427] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1fa98eb0-ebde-4f3e-aadc-1e9240f41a68 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.928203] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 641.928395] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 641.928565] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 641.928800] env[62346]: INFO nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Took 0.61 seconds to destroy the instance on the hypervisor. 
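The failed boot traced above follows the driver's task-polling contract: CopyVirtualDisk_Task is submitted, wait_for_task polls it ("progress is 0%"), and when the task ends in error the task fault is translated into a Python exception (here VimFaultException with Faults: ['InvalidArgument']), which unwinds spawn and triggers the destroy sequence just logged. The sketch below reproduces that poll-and-translate loop in isolation; it is illustrative only, and every name in it (FakeTask, TaskFault, poll_task_until_done) is hypothetical rather than the oslo_vmware API.

    # Hypothetical sketch of the poll-until-done / translate-fault pattern
    # seen above. Not the oslo_vmware implementation; names are invented.
    import time


    class TaskFault(Exception):
        """Stand-in for a translated task fault (cf. VimFaultException)."""

        def __init__(self, message, faults):
            super().__init__(message)
            self.faults = faults


    class FakeTask:
        """Toy task that reports 'running' a few times, then finishes."""

        def __init__(self, task_id, fail_with=None, polls_needed=3):
            self.task_id = task_id
            self._fail_with = fail_with
            self._polls_left = polls_needed

        def info(self):
            self._polls_left -= 1
            if self._polls_left > 0:
                return {"state": "running", "progress": 0}
            if self._fail_with:
                return {"state": "error", "error": self._fail_with}
            return {"state": "success", "progress": 100}


    def poll_task_until_done(task, interval=0.5, timeout=60.0):
        """Poll until the task completes; raise TaskFault on task error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = task.info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # Mirrors: raise exceptions.translate_fault(task_info.error)
                raise TaskFault(info["error"]["message"],
                                info["error"]["faults"])
            print(f"Task {task.task_id} progress is {info['progress']}%.")
            time.sleep(interval)
        raise TimeoutError(f"Task {task.task_id} did not finish in {timeout}s")


    if __name__ == "__main__":
        print(poll_task_until_done(FakeTask("task-1"), interval=0.01))
        bad = FakeTask("task-2", fail_with={
            "message": "A specified parameter was not correct: fileType",
            "faults": ["InvalidArgument"]})
        try:
            poll_task_until_done(bad, interval=0.01)
        except TaskFault as exc:
            print(f"translated fault: {exc} {exc.faults}")

Note how the error path raises out of the polling loop rather than returning a status: that is why, in the trace, the exception surfaces all the way up through _wait_for_task into _build_and_run_instance and aborts the spawn.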
[ 641.931662] env[62346]: DEBUG nova.compute.claims [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 641.931836] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.932051] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.950248] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 642.012266] env[62346]: DEBUG oslo_vmware.rw_handles [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 642.080676] env[62346]: DEBUG oslo_vmware.rw_handles [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 642.080745] env[62346]: DEBUG oslo_vmware.rw_handles [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 642.455494] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c724447-a914-41f3-b93f-66709e680f70 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.463968] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d23c6da-a46e-4448-bb31-3bb4fe767384 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.494515] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daf7388-dd44-467a-8d4d-fae20a531562 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.503229] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79683d05-43b9-4bd0-9481-cf3f815a1449 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.519268] env[62346]: DEBUG nova.compute.provider_tree [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.529778] env[62346]: DEBUG nova.scheduler.client.report [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.544786] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.613s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.545351] env[62346]: ERROR nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 642.545351] env[62346]: Faults: ['InvalidArgument'] [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Traceback (most recent call last): [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] 
self.driver.spawn(context, instance, image_meta, [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] self._fetch_image_if_missing(context, vi) [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] image_cache(vi, tmp_image_ds_loc) [ 642.545351] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] vm_util.copy_virtual_disk( [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] session._wait_for_task(vmdk_copy_task) [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] return self.wait_for_task(task_ref) [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] return evt.wait() [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] result = hub.switch() [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] return self.greenlet.switch() [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 642.546168] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] self.f(*self.args, **self.kw) [ 642.546855] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 642.546855] 
env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] raise exceptions.translate_fault(task_info.error) [ 642.546855] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 642.546855] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Faults: ['InvalidArgument'] [ 642.546855] env[62346]: ERROR nova.compute.manager [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] [ 642.546855] env[62346]: DEBUG nova.compute.utils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 642.547888] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Build of instance b3cb6cbd-a508-4b62-a2b0-14d2963884fb was re-scheduled: A specified parameter was not correct: fileType [ 642.547888] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 642.548296] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 642.548473] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 642.548649] env[62346]: DEBUG nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 642.548813] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 643.025895] env[62346]: DEBUG nova.network.neutron [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.036911] env[62346]: INFO nova.compute.manager [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: b3cb6cbd-a508-4b62-a2b0-14d2963884fb] Took 0.49 seconds to deallocate network for instance. 
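Every lock operation in this trace is bracketed the same way: an "Acquiring lock" line, an "acquired :: waited N s" line, and finally a "released :: held N s" line, as with the compute_resources lock held for 0.613s during the claim abort above. A minimal, self-contained sketch of that named-lock timing pattern follows; timed_lock and LOCKS are hypothetical names for illustration, not the oslo_concurrency.lockutils implementation.

    # Hypothetical sketch of named locks with waited/held accounting.
    # Illustrative only; not oslo_concurrency.lockutils.
    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    LOCKS = defaultdict(threading.Lock)  # one process-wide lock per name


    @contextmanager
    def timed_lock(name, owner):
        """Acquire the named lock, reporting wait and hold durations."""
        lock = LOCKS[name]
        t0 = time.monotonic()
        print(f'Acquiring lock "{name}" by "{owner}"')
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired by "{owner}" :: '
                  f'waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" "released" by "{owner}" :: '
                      f'held {held:.3f}s')


    if __name__ == "__main__":
        with timed_lock("compute_resources",
                        "ResourceTracker.abort_instance_claim"):
            time.sleep(0.05)  # work performed under the lock

Because the lock is keyed by name, the resource tracker's instance_claim and abort_instance_claim serialize on the same "compute_resources" lock, which is exactly the waited/held interleaving visible in the entries above.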
[ 643.149395] env[62346]: INFO nova.scheduler.client.report [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Deleted allocations for instance b3cb6cbd-a508-4b62-a2b0-14d2963884fb [ 643.171645] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4b51f8c2-e879-4bb5-a601-d31df9911d4f tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "b3cb6cbd-a508-4b62-a2b0-14d2963884fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.639s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.218460] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 643.277194] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.277468] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.279059] env[62346]: INFO nova.compute.claims [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.743598] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8ac74b-f25e-4298-b0c3-3ca0c7e093f2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.751789] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b51508a-e419-4946-abdf-cb935adb3b43 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.782785] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83889dc3-684d-471f-aaf5-7d7cdeec09ff {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.791152] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc95d77-fdd7-45bb-86c5-b1bcfbfa49c5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.808026] env[62346]: DEBUG nova.compute.provider_tree [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Inventory has not changed in 
ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.817768] env[62346]: DEBUG nova.scheduler.client.report [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 643.832695] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.555s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.833195] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 643.867715] env[62346]: DEBUG nova.compute.utils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 643.872019] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 643.872019] env[62346]: DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 643.880226] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 643.952921] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 643.972675] env[62346]: DEBUG nova.policy [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6ad490c92baa4064a1b55f92c0b84563', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfcd954a9e4d47cc893007bfd1c87a93', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 643.988399] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 643.988640] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 643.988805] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.988991] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 643.989296] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.989529] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 643.990203] env[62346]: DEBUG nova.virt.hardware [None 
req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 643.990407] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 643.990636] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 643.990767] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 643.990983] env[62346]: DEBUG nova.virt.hardware [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 643.992207] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6984f98-c95f-4a62-aa0f-d0275b592cbc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.001566] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284cdeeb-4cfe-4cee-a8c0-f185b8df930b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.574509] env[62346]: DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Successfully created port: 08fe71c7-8ac5-44c2-97f8-abc8a53065d8 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.078282] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9a916406-af80-4be1-bf48-ade97a24ecc5 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "ca26d6e1-b841-490c-bfd6-33351926e630" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.078566] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9a916406-af80-4be1-bf48-ade97a24ecc5 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "ca26d6e1-b841-490c-bfd6-33351926e630" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.565562] env[62346]: 
DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Successfully updated port: 08fe71c7-8ac5-44c2-97f8-abc8a53065d8 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 645.580399] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "refresh_cache-29eba656-6430-4009-8d24-c5a6f33bef95" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.580996] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquired lock "refresh_cache-29eba656-6430-4009-8d24-c5a6f33bef95" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.581282] env[62346]: DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.661857] env[62346]: DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.818491] env[62346]: DEBUG nova.compute.manager [req-c6831345-d1bc-44fc-94dc-c39e71819565 req-b507b3a4-597c-4e30-8e3f-70c8bfcde6cf service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Received event network-vif-plugged-08fe71c7-8ac5-44c2-97f8-abc8a53065d8 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 645.818637] env[62346]: DEBUG oslo_concurrency.lockutils [req-c6831345-d1bc-44fc-94dc-c39e71819565 req-b507b3a4-597c-4e30-8e3f-70c8bfcde6cf service nova] Acquiring lock "29eba656-6430-4009-8d24-c5a6f33bef95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.818824] env[62346]: DEBUG oslo_concurrency.lockutils [req-c6831345-d1bc-44fc-94dc-c39e71819565 req-b507b3a4-597c-4e30-8e3f-70c8bfcde6cf service nova] Lock "29eba656-6430-4009-8d24-c5a6f33bef95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.819043] env[62346]: DEBUG oslo_concurrency.lockutils [req-c6831345-d1bc-44fc-94dc-c39e71819565 req-b507b3a4-597c-4e30-8e3f-70c8bfcde6cf service nova] Lock "29eba656-6430-4009-8d24-c5a6f33bef95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.819647] env[62346]: DEBUG nova.compute.manager [req-c6831345-d1bc-44fc-94dc-c39e71819565 req-b507b3a4-597c-4e30-8e3f-70c8bfcde6cf 
service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] No waiting events found dispatching network-vif-plugged-08fe71c7-8ac5-44c2-97f8-abc8a53065d8 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 645.819647] env[62346]: WARNING nova.compute.manager [req-c6831345-d1bc-44fc-94dc-c39e71819565 req-b507b3a4-597c-4e30-8e3f-70c8bfcde6cf service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Received unexpected event network-vif-plugged-08fe71c7-8ac5-44c2-97f8-abc8a53065d8 for instance with vm_state building and task_state spawning. [ 645.921794] env[62346]: DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Updating instance_info_cache with network_info: [{"id": "08fe71c7-8ac5-44c2-97f8-abc8a53065d8", "address": "fa:16:3e:08:cf:47", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fe71c7-8a", "ovs_interfaceid": "08fe71c7-8ac5-44c2-97f8-abc8a53065d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.938570] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Releasing lock "refresh_cache-29eba656-6430-4009-8d24-c5a6f33bef95" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.938846] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Instance network_info: |[{"id": "08fe71c7-8ac5-44c2-97f8-abc8a53065d8", "address": "fa:16:3e:08:cf:47", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": 
"nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fe71c7-8a", "ovs_interfaceid": "08fe71c7-8ac5-44c2-97f8-abc8a53065d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 645.939277] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:cf:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '821e0800-fabf-48d0-87b4-db5a1eddce93', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08fe71c7-8ac5-44c2-97f8-abc8a53065d8', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 645.946628] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Creating folder: Project (cfcd954a9e4d47cc893007bfd1c87a93). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 645.947220] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8db9f843-7023-44ae-b9b6-11f3331275b1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.958606] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Created folder: Project (cfcd954a9e4d47cc893007bfd1c87a93) in parent group-v953204. [ 645.958833] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Creating folder: Instances. Parent ref: group-v953242. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 645.959300] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6709a134-fa88-4fe0-bfc8-a4c9cfc9b1a1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.968648] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Created folder: Instances in parent group-v953242. [ 645.968969] env[62346]: DEBUG oslo.service.loopingcall [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.969180] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 645.969381] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94f1f0aa-bc9a-4e66-ae22-b32f603a65ba {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.989035] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 645.989035] env[62346]: value = "task-4891623" [ 645.989035] env[62346]: _type = "Task" [ 645.989035] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.997412] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891623, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.499495] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891623, 'name': CreateVM_Task, 'duration_secs': 0.393181} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.499693] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 646.500813] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.500813] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.500813] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 646.501065] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-323a67e9-8b31-452e-a527-dda8bad6cc25 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.506784] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Waiting for the task: (returnval){ [ 646.506784] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ae7400-b0c3-de25-42c0-a75ea5c39e38" [ 646.506784] env[62346]: _type = "Task" [ 646.506784] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.515511] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ae7400-b0c3-de25-42c0-a75ea5c39e38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.017550] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.017938] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 647.018030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.970221] env[62346]: DEBUG nova.compute.manager [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Received event network-changed-08fe71c7-8ac5-44c2-97f8-abc8a53065d8 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 647.970221] env[62346]: DEBUG nova.compute.manager [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Refreshing instance network info cache due to event network-changed-08fe71c7-8ac5-44c2-97f8-abc8a53065d8. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 647.970221] env[62346]: DEBUG oslo_concurrency.lockutils [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] Acquiring lock "refresh_cache-29eba656-6430-4009-8d24-c5a6f33bef95" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.970221] env[62346]: DEBUG oslo_concurrency.lockutils [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] Acquired lock "refresh_cache-29eba656-6430-4009-8d24-c5a6f33bef95" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.970221] env[62346]: DEBUG nova.network.neutron [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Refreshing network info cache for port 08fe71c7-8ac5-44c2-97f8-abc8a53065d8 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 648.396957] env[62346]: DEBUG nova.network.neutron [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Updated VIF entry in instance network info cache for port 08fe71c7-8ac5-44c2-97f8-abc8a53065d8. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 648.397363] env[62346]: DEBUG nova.network.neutron [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Updating instance_info_cache with network_info: [{"id": "08fe71c7-8ac5-44c2-97f8-abc8a53065d8", "address": "fa:16:3e:08:cf:47", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fe71c7-8a", "ovs_interfaceid": "08fe71c7-8ac5-44c2-97f8-abc8a53065d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.408445] env[62346]: DEBUG oslo_concurrency.lockutils [req-5beeebfb-48e9-455c-aadb-ee06b3e4aacc req-c56b1411-8083-4f78-9cc8-2820da4d2039 service nova] Releasing lock "refresh_cache-29eba656-6430-4009-8d24-c5a6f33bef95" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.222648] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.219583] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.233723] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.234116] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.234317] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.234681] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 675.235593] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966684d5-124a-4989-bee6-3ab441699b25 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.244805] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353cf963-d727-4e43-ac01-a78fed44ba26 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.261200] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b70b7e-7280-4655-bd30-828bde4ae76b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.268209] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1828f999-7d86-4252-abd1-05949d00665e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.298304] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180572MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 675.298470] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.298669] 
env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.378031] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ef410c62-2279-4be0-bb88-9fb0735eca19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.378367] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.378628] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.378889] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.379142] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.379359] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 67fbd771-9a97-428e-9453-4e1eba7e141d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.379621] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9684739f-82ae-4738-8d27-9d273b547ad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.379838] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.381023] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.381023] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 675.393171] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.409612] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.420283] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3dbb3789-e23a-4810-801e-dbe4f76d97fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.434454] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f0a463df-07ca-48b0-9254-a1a746d3f5b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.444526] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.454861] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 84536893-aa88-4ac3-8340-509cb9d88088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.465222] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.475464] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daf47742-054a-496d-b754-3a2687ebe973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.488440] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.500339] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6ec381e8-762e-4136-863b-2b1a566abb9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.512681] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 64da10a6-4063-4288-88ab-ae97b8c1fd88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.527462] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 35d268f6-0573-4f9b-85ac-09359c56ef8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.539099] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c48b9d0d-37c3-47bb-9f9a-4055eb607c93 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.549999] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2aa479bb-71e8-4d21-9af8-d2af9d284c14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.560524] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance cc78290c-3615-4e59-b0f2-95d967b07569 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.573196] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d87291e9-b698-4e6c-a265-55c00b863ac1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.582886] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 682b49a4-4901-4c62-9e5a-cf03047e1cb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.592802] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 865c8799-4a94-41af-921c-c4206331ba81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.603315] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f0d00a15-7869-4b1a-bf6f-a7f5e1c88138 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.614717] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e2bfd80f-9e03-4dd1-bd28-fb33b54463b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.624876] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6a1f6a2-05be-43d5-a2c3-36bc5a878434 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.635922] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 55ab9511-9e85-426e-b6bc-829f88c534f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.646850] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca26d6e1-b841-490c-bfd6-33351926e630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 675.647298] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 675.647509] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '2', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_bf244586f5044940ad18cc8fcc81adbf': '1', 'io_workload': '10', 'num_proj_1dda2d64ca9c4dca96b9ed840a531345': '1', 'num_proj_57259b412e4744f28b85130103f6018f': '1', 'num_proj_67d7b1e9bec14d2f8be2b7d5f9da1973': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_72448b8fe7224f62a5bc90c77852c499': '1', 'num_proj_b5c07e50d0924dae920d326b8ff7fe33': '1', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 676.067667] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f95c01-cd0d-453f-a779-2ef94f6efeae {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.075647] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4e5cca-bcff-47eb-9951-3c53de2169ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.104898] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b251dda-6c13-4d52-90b2-f60d8f6ababb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.112837] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b0c09a-89b7-4ed7-b314-abfe7697f47c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.126335] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.135379] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.150227] env[62346]: DEBUG nova.compute.resource_tracker [None 
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 676.150496] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.852s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.152200] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.152530] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 677.152619] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 677.173831] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.173983] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174130] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174257] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174379] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174496] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174616] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174733] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174850] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.174963] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 677.175091] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 677.219669] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.219890] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.220052] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.220203] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.220408] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.220592] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.220734] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 690.813987] env[62346]: WARNING oslo_vmware.rw_handles [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 690.813987] env[62346]: ERROR oslo_vmware.rw_handles [ 690.815082] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 690.817222] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 690.817695] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Copying Virtual Disk [datastore2] vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/dd9c3406-6eda-41a3-b73f-c5136b4b52c1/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 690.818237] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2baf1014-e1dc-48bf-9249-651b40d99601 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.827298] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Waiting for the task: (returnval){ [ 690.827298] 
env[62346]: value = "task-4891624" [ 690.827298] env[62346]: _type = "Task" [ 690.827298] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.835058] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Task: {'id': task-4891624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.337627] env[62346]: DEBUG oslo_vmware.exceptions [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 691.337627] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.338094] env[62346]: ERROR nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 691.338094] env[62346]: Faults: ['InvalidArgument'] [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Traceback (most recent call last): [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] yield resources [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self.driver.spawn(context, instance, image_meta, [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self._fetch_image_if_missing(context, vi) [ 691.338094] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] image_cache(vi, tmp_image_ds_loc) [ 
691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] vm_util.copy_virtual_disk( [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] session._wait_for_task(vmdk_copy_task) [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] return self.wait_for_task(task_ref) [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] return evt.wait() [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] result = hub.switch() [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 691.338575] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] return self.greenlet.switch() [ 691.339064] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 691.339064] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self.f(*self.args, **self.kw) [ 691.339064] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 691.339064] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] raise exceptions.translate_fault(task_info.error) [ 691.339064] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 691.339064] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Faults: ['InvalidArgument'] [ 691.339064] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] [ 691.339064] env[62346]: INFO nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Terminating instance [ 691.340412] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.340412] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 691.340753] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca177c80-c99e-4c6e-a2aa-840860df9faa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.343765] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquiring lock "refresh_cache-67fbd771-9a97-428e-9453-4e1eba7e141d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.343995] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquired lock "refresh_cache-67fbd771-9a97-428e-9453-4e1eba7e141d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.344286] env[62346]: DEBUG nova.network.neutron [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 691.352819] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 691.353308] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 691.354194] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f90a970-e72d-462e-9d5c-156dc932e00d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.364271] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Waiting for the task: (returnval){ [ 691.364271] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52a388d7-8d5c-6059-ae48-7dc3d7fa4e08" [ 691.364271] env[62346]: _type = "Task" [ 691.364271] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.373039] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52a388d7-8d5c-6059-ae48-7dc3d7fa4e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.388121] env[62346]: DEBUG nova.network.neutron [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.508034] env[62346]: DEBUG nova.network.neutron [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.520850] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Releasing lock "refresh_cache-67fbd771-9a97-428e-9453-4e1eba7e141d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.521375] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 691.521582] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 691.523099] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed5a0cb-1195-4527-a754-1d0445d7ab25 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.531691] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 691.531938] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-943bea71-8468-406a-b41a-276e1fb8d8c0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.622744] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 691.622987] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 691.623300] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Deleting the datastore file [datastore2] 67fbd771-9a97-428e-9453-4e1eba7e141d {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 691.623571] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34df7ef4-6015-49bb-8c9e-36aed809a64b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.632120] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Waiting for the task: (returnval){ [ 691.632120] env[62346]: value = "task-4891626" [ 691.632120] env[62346]: _type = "Task" [ 691.632120] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.640206] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Task: {'id': task-4891626, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.874598] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 691.874958] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Creating directory with path [datastore2] vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 691.875097] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78f37b2b-1fcc-4a70-a1bf-749f5b76ff46 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.888155] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Created directory with path [datastore2] vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 691.888371] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Fetch image to [datastore2] vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 691.888592] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 691.889393] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d4a4b7-5bea-4825-9372-90928a2e161b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.897780] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79262153-2144-4cbd-98e2-6f11e76a1063 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.907544] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1211b2-95df-43ff-8bbe-4fddf6413988 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.940030] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bfa9cafa-d460-4c54-9bc0-1fa657e48d5f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.947647] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a4771b1c-9284-4085-9299-0ef456a6188e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.979136] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 692.033072] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 692.093166] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 692.093367] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 692.148373] env[62346]: DEBUG oslo_vmware.api [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Task: {'id': task-4891626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034312} completed successfully. 
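The rw_handles lines above stream the Glance image straight into the datastore's HTTP folder endpoint, authorized by the generic service ticket acquired just before. A rough equivalent using the requests library (the driver itself uses oslo_vmware.rw_handles; the cookie name and helper here are assumptions for illustration):

    import requests

    def upload_to_datastore(image_iter, url, ticket, verify=True):
        # PUT the image bytes to the ESX "/folder/..." URL; passing the
        # AcquireGenericServiceTicket value as a cookie lets the host
        # accept the write without a full authenticated session.
        resp = requests.put(
            url,
            data=image_iter,  # any iterable of byte chunks is streamed
            headers={'Content-Type': 'application/octet-stream'},
            cookies={'vmware_cgi_ticket': ticket},  # assumed cookie name
            verify=verify)
        resp.raise_for_status()
        return resp.status_code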
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.148704] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 692.148949] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 692.150248] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 692.150248] env[62346]: INFO nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Took 0.63 seconds to destroy the instance on the hypervisor. [ 692.150248] env[62346]: DEBUG oslo.service.loopingcall [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.150248] env[62346]: DEBUG nova.compute.manager [-] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 692.152893] env[62346]: DEBUG nova.compute.claims [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 692.153091] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.153309] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.654800] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c4de59-2ad2-41d9-a2c2-c9a6174101d0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.665098] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f2b5a9-1124-4044-ae9d-f257c6ddbb00 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.696069] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40285edd-d934-4bc3-bc56-6b8146d301f5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.704214] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b61482f-889b-4303-9221-3b8744a9b9d6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.719240] env[62346]: DEBUG nova.compute.provider_tree [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.728780] env[62346]: DEBUG nova.scheduler.client.report [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 692.748562] env[62346]: DEBUG oslo_concurrency.lockutils [None 
req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.595s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.749114] env[62346]: ERROR nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 692.749114] env[62346]: Faults: ['InvalidArgument'] [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Traceback (most recent call last): [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self.driver.spawn(context, instance, image_meta, [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self._fetch_image_if_missing(context, vi) [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] image_cache(vi, tmp_image_ds_loc) [ 692.749114] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] vm_util.copy_virtual_disk( [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] session._wait_for_task(vmdk_copy_task) [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] return self.wait_for_task(task_ref) [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] return evt.wait() [ 
692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] result = hub.switch() [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] return self.greenlet.switch() [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 692.749435] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] self.f(*self.args, **self.kw) [ 692.749715] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 692.749715] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] raise exceptions.translate_fault(task_info.error) [ 692.749715] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 692.749715] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Faults: ['InvalidArgument'] [ 692.749715] env[62346]: ERROR nova.compute.manager [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] [ 692.749829] env[62346]: DEBUG nova.compute.utils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 692.751897] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Build of instance 67fbd771-9a97-428e-9453-4e1eba7e141d was re-scheduled: A specified parameter was not correct: fileType [ 692.751897] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 692.752425] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 692.752767] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Acquiring lock "refresh_cache-67fbd771-9a97-428e-9453-4e1eba7e141d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.753031] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 
tempest-ServersAaction247Test-457584686-project-member] Acquired lock "refresh_cache-67fbd771-9a97-428e-9453-4e1eba7e141d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.753289] env[62346]: DEBUG nova.network.neutron [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 692.785139] env[62346]: DEBUG nova.network.neutron [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.873404] env[62346]: DEBUG nova.network.neutron [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.883697] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Releasing lock "refresh_cache-67fbd771-9a97-428e-9453-4e1eba7e141d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.884027] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 692.884168] env[62346]: DEBUG nova.compute.manager [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] [instance: 67fbd771-9a97-428e-9453-4e1eba7e141d] Skipping network deallocation for instance since networking was not requested. {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 692.993308] env[62346]: INFO nova.scheduler.client.report [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Deleted allocations for instance 67fbd771-9a97-428e-9453-4e1eba7e141d [ 693.015388] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2ffabbbc-51dc-43df-9d05-a68704e35d98 tempest-ServersAaction247Test-457584686 tempest-ServersAaction247Test-457584686-project-member] Lock "67fbd771-9a97-428e-9453-4e1eba7e141d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.506s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.032458] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Starting instance... 
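The Acquiring/acquired/released triplets that dominate this log, including the build lock released here after 141.506s, come from oslo_concurrency.lockutils. Minimal usage that produces the same DEBUG lines (the decorated function is illustrative, not Nova's):

    from oslo_concurrency import lockutils

    # Nova builds its decorator with a service prefix, e.g.:
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_resource_tracker():
        # The body runs with the named lock held; lockutils emits the
        # "acquired ... waited Ns" / "released ... held Ns" pairs seen
        # throughout this log.
        pass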
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 693.091020] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.091351] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.092993] env[62346]: INFO nova.compute.claims [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.560832] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de486eb6-5f87-49b7-9f28-0e766a41f358 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.568650] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0995a04b-f2e6-485d-b80d-bb49e5f3ddca {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.599819] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfa6e53-55d0-4181-926a-bfb974859f6b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.608104] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edfc015-d2d3-4c98-ad3d-298d3bb75206 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.622737] env[62346]: DEBUG nova.compute.provider_tree [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.631509] env[62346]: DEBUG nova.scheduler.client.report [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 693.647653] env[62346]: DEBUG oslo_concurrency.lockutils [None 
req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.556s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.648260] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 693.685887] env[62346]: DEBUG nova.compute.utils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 693.687574] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 693.687753] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 693.700617] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 693.766891] env[62346]: DEBUG nova.policy [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34997955519c4b47bb967d4fd431be32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86b987e66ef94d7ab1a1bdb96ddc4a41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 693.778422] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Start spawning the instance on the hypervisor. 
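The "Policy check for network:attach_external_network failed" line above is an ordinary oslo.policy denial for member-only credentials; Nova simply proceeds without external-network attachment. A self-contained sketch of such a check (the rule string registered here is an assumption; real deployments define it via policy files and defaults):

    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([])  # parse empty CLI args so option access is initialized
    enforcer = policy.Enforcer(conf)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network',
                           'role:admin'))  # assumed default rule

    creds = {'roles': ['reader', 'member'],
             'project_id': '86b987e66ef94d7ab1a1bdb96ddc4a41'}
    # False for member-only credentials, matching the log line above
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)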
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 693.807149] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 693.807470] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 693.807679] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 693.807895] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 693.808097] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 693.808303] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 693.808533] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 693.808699] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 693.808868] env[62346]: DEBUG nova.virt.hardware [None 
req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 693.809045] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 693.809230] env[62346]: DEBUG nova.virt.hardware [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 693.810118] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8d7b07-0593-46aa-b10b-c80c564cb071 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.822104] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91205d6-0045-40a0-b9e7-3a9fd4b6f58b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.247216] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Successfully created port: 3d478f85-337f-45cd-be05-74b257cc0788 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.365881] env[62346]: DEBUG nova.compute.manager [req-e6d434ff-00a5-41c5-af73-3cd9ee638427 req-edeb2bbf-f7f0-43e0-b620-5bc609f57e0c service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Received event network-vif-plugged-3d478f85-337f-45cd-be05-74b257cc0788 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 695.365881] env[62346]: DEBUG oslo_concurrency.lockutils [req-e6d434ff-00a5-41c5-af73-3cd9ee638427 req-edeb2bbf-f7f0-43e0-b620-5bc609f57e0c service nova] Acquiring lock "8e698dc0-2883-4987-8baa-f5b6b43fff06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.365881] env[62346]: DEBUG oslo_concurrency.lockutils [req-e6d434ff-00a5-41c5-af73-3cd9ee638427 req-edeb2bbf-f7f0-43e0-b620-5bc609f57e0c service nova] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.365881] env[62346]: DEBUG oslo_concurrency.lockutils [req-e6d434ff-00a5-41c5-af73-3cd9ee638427 req-edeb2bbf-f7f0-43e0-b620-5bc609f57e0c service nova] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.366354] env[62346]: DEBUG nova.compute.manager 
[req-e6d434ff-00a5-41c5-af73-3cd9ee638427 req-edeb2bbf-f7f0-43e0-b620-5bc609f57e0c service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] No waiting events found dispatching network-vif-plugged-3d478f85-337f-45cd-be05-74b257cc0788 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 695.366354] env[62346]: WARNING nova.compute.manager [req-e6d434ff-00a5-41c5-af73-3cd9ee638427 req-edeb2bbf-f7f0-43e0-b620-5bc609f57e0c service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Received unexpected event network-vif-plugged-3d478f85-337f-45cd-be05-74b257cc0788 for instance with vm_state building and task_state spawning. [ 695.385871] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Successfully updated port: 3d478f85-337f-45cd-be05-74b257cc0788 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 695.406427] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "refresh_cache-8e698dc0-2883-4987-8baa-f5b6b43fff06" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.406583] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquired lock "refresh_cache-8e698dc0-2883-4987-8baa-f5b6b43fff06" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.408146] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 695.473115] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Instance cache missing network info. 
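The "Received unexpected event" warning above is benign here: Neutron delivered network-vif-plugged before the driver registered a waiter for it. The event table the manager consults works roughly like this toy version (class and names are illustrative, not Nova's implementation):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        # Spawning threads register the event they expect; the external
        # event RPC pops and signals it, or reports it as unexpected
        # when nobody is waiting (the WARNING above).
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)  # uuid -> {name: Event}

        def prepare(self, instance_uuid, name):
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][name] = ev
            return ev

        def pop(self, instance_uuid, name):
            with self._lock:
                return self._waiters[instance_uuid].pop(name, None)

    events = InstanceEvents()
    ev = events.pop('8e698dc0', 'network-vif-plugged-3d478f85')
    if ev is None:
        print('Received unexpected event')  # the warning path above
    else:
        ev.set()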
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 695.754649] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Updating instance_info_cache with network_info: [{"id": "3d478f85-337f-45cd-be05-74b257cc0788", "address": "fa:16:3e:c0:69:a1", "network": {"id": "62b8d2b1-382d-4e30-af78-47fca4e5f2b6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1543893235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86b987e66ef94d7ab1a1bdb96ddc4a41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d478f85-33", "ovs_interfaceid": "3d478f85-337f-45cd-be05-74b257cc0788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.767273] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Releasing lock "refresh_cache-8e698dc0-2883-4987-8baa-f5b6b43fff06" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.767561] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Instance network_info: |[{"id": "3d478f85-337f-45cd-be05-74b257cc0788", "address": "fa:16:3e:c0:69:a1", "network": {"id": "62b8d2b1-382d-4e30-af78-47fca4e5f2b6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1543893235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86b987e66ef94d7ab1a1bdb96ddc4a41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d478f85-33", "ovs_interfaceid": "3d478f85-337f-45cd-be05-74b257cc0788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 695.767987] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:69:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d478f85-337f-45cd-be05-74b257cc0788', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 695.775568] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Creating folder: Project (86b987e66ef94d7ab1a1bdb96ddc4a41). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 695.776157] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-535d1aae-fc08-408c-b846-173446711505 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.789094] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Created folder: Project (86b987e66ef94d7ab1a1bdb96ddc4a41) in parent group-v953204. [ 695.789344] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Creating folder: Instances. Parent ref: group-v953245. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 695.789582] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3062f693-05d0-4680-9b7d-cd5fa1a801cb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.798593] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Created folder: Instances in parent group-v953245. [ 695.798833] env[62346]: DEBUG oslo.service.loopingcall [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.799033] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 695.799238] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e82b3a97-d976-4b1e-b9dd-5ee0975128e5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.819987] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 695.819987] env[62346]: value = "task-4891629" [ 695.819987] env[62346]: _type = "Task" [ 695.819987] env[62346]: } to complete. 
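The two Folder.CreateFolder calls above (the per-project folder, then Instances) can race with concurrent builds, so the driver treats a duplicate-name fault as success. The pattern in miniature, with hypothetical create/lookup callables standing in for the vCenter call and its inventory lookup:

    class DuplicateName(Exception):
        pass

    def ensure_folder(create, lookup, name):
        # Create the folder, but tolerate losing the race: if another
        # request created it first, reuse the existing reference.
        try:
            return create(name)
        except DuplicateName:
            return lookup(name)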
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.828692] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891629, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.334127] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891629, 'name': CreateVM_Task, 'duration_secs': 0.303801} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.334127] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 696.334127] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.334127] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.334127] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 696.334410] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7e1637f-71da-4dcf-96ef-a82f8e737642 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.341250] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Waiting for the task: (returnval){ [ 696.341250] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d5f677-3826-0540-881f-d59d8cd563fc" [ 696.341250] env[62346]: _type = "Task" [ 696.341250] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.356637] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d5f677-3826-0540-881f-d59d8cd563fc, 'name': SearchDatastore_Task} progress is 0%. 
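The locks on "[datastore2] devstack-image-cache_base/9feb52a6-..." above serialize access to one image-cache entry so only the first builder downloads the image; later builders re-check under the lock and reuse it. The per-key double-checked pattern, sketched in-process only (the real lock names also coordinate across workers):

    import threading

    _locks = {}
    _guard = threading.Lock()

    def fetch_image_if_missing(image_id, exists, fetch):
        # One lock per image id; re-check existence while holding it so
        # concurrent requests for the same image download it only once.
        with _guard:
            lock = _locks.setdefault(image_id, threading.Lock())
        with lock:
            if not exists(image_id):
                fetch(image_id)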
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.851872] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.852668] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 696.853102] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.503626] env[62346]: DEBUG nova.compute.manager [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Received event network-changed-3d478f85-337f-45cd-be05-74b257cc0788 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 697.503859] env[62346]: DEBUG nova.compute.manager [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Refreshing instance network info cache due to event network-changed-3d478f85-337f-45cd-be05-74b257cc0788. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 697.504146] env[62346]: DEBUG oslo_concurrency.lockutils [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] Acquiring lock "refresh_cache-8e698dc0-2883-4987-8baa-f5b6b43fff06" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.504286] env[62346]: DEBUG oslo_concurrency.lockutils [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] Acquired lock "refresh_cache-8e698dc0-2883-4987-8baa-f5b6b43fff06" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.504483] env[62346]: DEBUG nova.network.neutron [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Refreshing network info cache for port 3d478f85-337f-45cd-be05-74b257cc0788 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 697.917400] env[62346]: DEBUG nova.network.neutron [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Updated VIF entry in instance network info cache for port 3d478f85-337f-45cd-be05-74b257cc0788. 
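The network-changed handler above refreshes the single affected VIF entry in the cached network_info rather than rebuilding the whole cache. In outline (a sketch, not Nova's code):

    def refresh_port_in_cache(nw_info, port_id, updated_fields):
        # nw_info is the cached list of VIF dicts shown in the log;
        # update the matching entry in place and report whether the
        # port was found.
        for vif in nw_info:
            if vif['id'] == port_id:
                vif.update(updated_fields)
                return True
        return False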
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 697.917732] env[62346]: DEBUG nova.network.neutron [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Updating instance_info_cache with network_info: [{"id": "3d478f85-337f-45cd-be05-74b257cc0788", "address": "fa:16:3e:c0:69:a1", "network": {"id": "62b8d2b1-382d-4e30-af78-47fca4e5f2b6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1543893235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86b987e66ef94d7ab1a1bdb96ddc4a41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d478f85-33", "ovs_interfaceid": "3d478f85-337f-45cd-be05-74b257cc0788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.929241] env[62346]: DEBUG oslo_concurrency.lockutils [req-68fc953f-b2b2-4607-bab3-cdd082056772 req-adff7cb3-c847-401a-ad09-4503ff5c5ca1 service nova] Releasing lock "refresh_cache-8e698dc0-2883-4987-8baa-f5b6b43fff06" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.162876] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "c72a59f9-220d-4da4-8daa-2724ab255190" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.163520] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "c72a59f9-220d-4da4-8daa-2724ab255190" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.219623] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.219911] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.234721] env[62346]: DEBUG oslo_concurrency.lockutils [None 
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.234941] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.235260] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.235437] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 735.236536] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928f8633-c7f7-4276-845b-fedb168b4a5d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.245436] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b88abb-a588-41cb-a3e8-7a18e38f1c37 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.260855] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd01095e-8de1-4595-b7e6-93c836235f31 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.268467] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14dfdca-064f-4cf6-9d9f-8467c5219455 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.297546] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180592MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 735.297727] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.297927] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.379883] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None 
None] Instance ef410c62-2279-4be0-bb88-9fb0735eca19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380062] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380194] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380316] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380435] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380556] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9684739f-82ae-4738-8d27-9d273b547ad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380672] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380787] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.380900] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
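The audit above walks every allocation placement holds against this node and classifies it: actively managed instances keep theirs, instances merely scheduled here and not yet started are skipped, and allocations for instances that no longer exist would be deleted. A toy classifier with the same three outcomes (field names are assumptions):

    def audit_allocations(allocations, instances):
        # allocations: {uuid: resources}; instances: {uuid: state dict}
        for uuid, resources in allocations.items():
            inst = instances.get(uuid)
            if inst is None:
                print('deleting allocation for missing instance', uuid)
            elif inst.get('scheduled_only'):
                print('scheduled but not started, skipping heal:', uuid)
            else:
                print('actively managed, keeping:', uuid, resources)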
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.381017] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 735.396623] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.406978] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3dbb3789-e23a-4810-801e-dbe4f76d97fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.418629] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f0a463df-07ca-48b0-9254-a1a746d3f5b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.430285] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.440602] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 84536893-aa88-4ac3-8340-509cb9d88088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.451364] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.462504] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daf47742-054a-496d-b754-3a2687ebe973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.472673] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.482495] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6ec381e8-762e-4136-863b-2b1a566abb9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.492631] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 64da10a6-4063-4288-88ab-ae97b8c1fd88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.502901] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 35d268f6-0573-4f9b-85ac-09359c56ef8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.515925] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c48b9d0d-37c3-47bb-9f9a-4055eb607c93 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.527030] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2aa479bb-71e8-4d21-9af8-d2af9d284c14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.539313] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance cc78290c-3615-4e59-b0f2-95d967b07569 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.550512] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d87291e9-b698-4e6c-a265-55c00b863ac1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.562018] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 682b49a4-4901-4c62-9e5a-cf03047e1cb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.571690] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 865c8799-4a94-41af-921c-c4206331ba81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.581834] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f0d00a15-7869-4b1a-bf6f-a7f5e1c88138 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.592051] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e2bfd80f-9e03-4dd1-bd28-fb33b54463b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.602421] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6a1f6a2-05be-43d5-a2c3-36bc5a878434 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.612355] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 55ab9511-9e85-426e-b6bc-829f88c534f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.624289] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca26d6e1-b841-490c-bfd6-33351926e630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.634829] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 735.635105] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 735.635263] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '3', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_bf244586f5044940ad18cc8fcc81adbf': '1', 'io_workload': '10', 'num_proj_1dda2d64ca9c4dca96b9ed840a531345': '1', 'num_proj_57259b412e4744f28b85130103f6018f': '1', 'num_proj_67d7b1e9bec14d2f8be2b7d5f9da1973': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_b5c07e50d0924dae920d326b8ff7fe33': '1', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 736.058527] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcb95f8-06fe-4dd6-9c65-2e324310d9a4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.066840] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e009a482-062a-43f7-98d0-90fa1e47d6f6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.097026] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b6f02a-2aa4-4d06-ac95-284647f47505 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.105251] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c767f87b-5389-4b5c-b195-ebc7522db3af {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.119791] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.133528] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.148770] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 736.149076] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.851s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.145775] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.219588] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.219786] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 737.219884] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 737.243759] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.243948] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244040] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244170] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244292] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244427] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244589] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244706] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244821] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.244934] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 737.245063] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 738.219738] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.220181] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.220181] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 739.216840] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.219801] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.219801] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.219801] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.454880] env[62346]: WARNING oslo_vmware.rw_handles [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed 
connection without response [ 740.454880] env[62346]: ERROR oslo_vmware.rw_handles [ 740.455577] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 740.457598] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 740.457885] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Copying Virtual Disk [datastore2] vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/2ed04d4b-f088-4619-a891-61a76bec7169/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 740.458190] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf7cc645-c1fc-4be2-9a8e-de889b4310ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.467080] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Waiting for the task: (returnval){ [ 740.467080] env[62346]: value = "task-4891630" [ 740.467080] env[62346]: _type = "Task" [ 740.467080] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.476107] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Task: {'id': task-4891630, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.979342] env[62346]: DEBUG oslo_vmware.exceptions [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 740.979578] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.980297] env[62346]: ERROR nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 740.980297] env[62346]: Faults: ['InvalidArgument'] [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Traceback (most recent call last): [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] yield resources [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self.driver.spawn(context, instance, image_meta, [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self._fetch_image_if_missing(context, vi) [ 740.980297] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] image_cache(vi, tmp_image_ds_loc) [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] vm_util.copy_virtual_disk( [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] session._wait_for_task(vmdk_copy_task) [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] return self.wait_for_task(task_ref) [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] return evt.wait() [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] result = hub.switch() [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 740.980716] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] return self.greenlet.switch() [ 740.981096] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 740.981096] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self.f(*self.args, **self.kw) [ 740.981096] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 740.981096] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] raise exceptions.translate_fault(task_info.error) [ 740.981096] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 740.981096] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Faults: ['InvalidArgument'] [ 740.981096] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] [ 740.983275] env[62346]: INFO nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Terminating instance [ 740.983679] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.984091] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 740.984848] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 
tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 740.985174] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 740.985508] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2e35146-ad2c-4cf0-88fc-583e58ccb8ef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.988140] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df81419-85cc-49f6-baa0-8dfb770cc76e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.995958] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 740.996512] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51d36f3b-6340-42fe-9809-d502182371ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.000075] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.000075] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 741.000487] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f232509-60ed-49fc-95e4-5999e127221a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.005896] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Waiting for the task: (returnval){ [ 741.005896] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52939176-30e2-0b33-8d7b-f002d0c18d31" [ 741.005896] env[62346]: _type = "Task" [ 741.005896] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.016367] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52939176-30e2-0b33-8d7b-f002d0c18d31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.074939] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 741.074939] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 741.074939] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Deleting the datastore file [datastore2] ef410c62-2279-4be0-bb88-9fb0735eca19 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 741.074939] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39c77f2e-346c-4eb4-8b8b-eff6a15a7016 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.081317] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Waiting for the task: (returnval){ [ 741.081317] env[62346]: value = "task-4891632" [ 741.081317] env[62346]: _type = "Task" [ 741.081317] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.089591] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Task: {'id': task-4891632, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.516901] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 741.517210] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Creating directory with path [datastore2] vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.517417] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1889cf4-3e71-4e25-829e-7c5567543e83 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.532452] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Created directory with path [datastore2] vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.532452] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Fetch image to [datastore2] vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 741.532452] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 741.532452] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6032cd41-7753-4917-bcad-4afe3edc4e58 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.538914] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5b2380-fd9f-4abd-a3ea-c68dcac58b09 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.549447] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd95434-d8da-4ef0-9b68-af2c4a774755 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.580314] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa6773a-705e-4240-b47e-6e8d4ff66e30 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.592666] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b3cf21a2-fee9-4030-abfc-5f88eccab9c0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.594642] env[62346]: DEBUG oslo_vmware.api [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Task: {'id': task-4891632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084661} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.594896] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.595094] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 741.595267] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 741.595447] env[62346]: INFO nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Took 0.61 seconds to destroy the instance on the hypervisor. 
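The two datastore operations above (the CopyVirtualDisk_Task that failed with InvalidArgument, and the DeleteDatastoreFile_Task that completed in 0.084661s) both follow oslo.vmware's invoke-then-poll pattern: a vCenter *_Task method is called, and the session then polls the task's state, logging "progress is N%" until the task reports success or a fault. Below is a minimal sketch of that polling loop, for orientation only and under stated assumptions: poll_vcenter_task, get_task_info and TaskFailed are illustrative names, not oslo.vmware's actual API, which additionally translates vCenter faults into exception classes (as with the VimFaultException seen in this log).

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (hypothetical name)."""

def poll_vcenter_task(get_task_info, interval=0.5, timeout=60.0):
    # get_task_info is assumed to return a dict like {'state': ..., ...};
    # in oslo.vmware the equivalent data comes from a PropertyCollector
    # round-trip against the Task managed object.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # The real library raises a fault-specific exception here, e.g.
            # the "A specified parameter was not correct: fileType"
            # VimFaultException recorded above.
            raise TaskFailed(info.get('error', 'unknown fault'))
        time.sleep(interval)  # each iteration yields one "progress is N%" line
    raise TaskFailed('timed out waiting for task')

# Stubbed usage: the task succeeds on the third poll.
_states = iter([{'state': 'running'}, {'state': 'running'},
                {'state': 'success', 'result': 'task-4891632'}])
print(poll_vcenter_task(lambda: next(_states), interval=0.01))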
[ 741.598273] env[62346]: DEBUG nova.compute.claims [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 741.598409] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.598621] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.631831] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 741.690515] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 741.752200] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 741.752400] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 742.118615] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b8c4f9-c354-4714-91ba-567e68db1bc8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.126685] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c08508-d61f-420b-9087-525e3480851d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.158269] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2950ead1-0d6b-4ea6-85cb-2c036f84987f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.168034] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7371a4d9-7957-49d4-a483-ad73b9f94209 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.181547] env[62346]: DEBUG nova.compute.provider_tree [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.190878] env[62346]: DEBUG nova.scheduler.client.report [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 742.209182] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.610s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.209749] env[62346]: ERROR nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 742.209749] env[62346]: Faults: ['InvalidArgument'] [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Traceback (most recent call last): [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self.driver.spawn(context, instance, image_meta, [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self._fetch_image_if_missing(context, vi) [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] image_cache(vi, tmp_image_ds_loc) [ 742.209749] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] vm_util.copy_virtual_disk( [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] session._wait_for_task(vmdk_copy_task) [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] return self.wait_for_task(task_ref) [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] return evt.wait() [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] result = hub.switch() [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] return self.greenlet.switch() [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 742.210124] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] self.f(*self.args, **self.kw) [ 742.210430] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 742.210430] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] raise exceptions.translate_fault(task_info.error) [ 742.210430] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 742.210430] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Faults: ['InvalidArgument'] [ 742.210430] env[62346]: ERROR nova.compute.manager [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] [ 742.210552] env[62346]: DEBUG nova.compute.utils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 742.212077] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Build of instance ef410c62-2279-4be0-bb88-9fb0735eca19 was re-scheduled: A specified parameter was not correct: fileType [ 742.212077] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 742.212455] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 742.212629] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
[ 742.212788] env[62346]: DEBUG nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 742.212951] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 742.624402] env[62346]: DEBUG nova.network.neutron [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.641721] env[62346]: INFO nova.compute.manager [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] [instance: ef410c62-2279-4be0-bb88-9fb0735eca19] Took 0.43 seconds to deallocate network for instance. [ 742.755464] env[62346]: INFO nova.scheduler.client.report [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Deleted allocations for instance ef410c62-2279-4be0-bb88-9fb0735eca19 [ 742.777810] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c4d6e11-a87d-430d-8835-d1f016d8081c tempest-ServersAdminNegativeTestJSON-400623828 tempest-ServersAdminNegativeTestJSON-400623828-project-member] Lock "ef410c62-2279-4be0-bb88-9fb0735eca19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.537s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.796187] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
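
The lockutils records on either side of this point report two timings with distinct meanings: "waited" is how long the caller blocked before acquiring the named lock (0.000s for the uncontended "compute_resources" claim below), and "held" is how long the lock was kept before release (198.537s for the whole failed build above). A rough local re-creation of that bookkeeping, assuming a plain threading.Lock rather than oslo's fair/external lock machinery:

import threading
import time
from contextlib import contextmanager

# Illustration only: named in-process locks with waited/held timing,
# mimicking the DEBUG lines oslo_concurrency.lockutils emits.
_locks = {}


@contextmanager
def timed_lock(name):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()               # "Acquiring lock <name>"
    lock.acquire()
    waited = time.monotonic() - t0      # time spent blocked on other holders
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1    # acquire-to-release duration
        print(f'Lock "{name}" "released" :: held {held:.3f}s')


with timed_lock('compute_resources'):
    time.sleep(0.05)  # stand-in for ResourceTracker.instance_claim work
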
[ 742.852926] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.853228] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.855164] env[62346]: INFO nova.compute.claims [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.186894] env[62346]: DEBUG oslo_concurrency.lockutils [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.326393] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b58aa9-cdc1-4571-ba1f-cae7705eb4cc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.335518] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee181e9-a238-4079-bbcd-7ef58fbd18f0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.365624] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f70f120-95a5-4505-a3c6-d6e95a24e587 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.374123] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e4882e-40ae-4829-9256-5bfc568ed26b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.387709] env[62346]: DEBUG nova.compute.provider_tree [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.396661] env[62346]: DEBUG nova.scheduler.client.report [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0,
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.414269] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.561s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.414784] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 743.472869] env[62346]: DEBUG nova.compute.utils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.474655] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Not allocating networking since 'none' was specified. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 743.488551] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 743.571919] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 743.598516] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 743.598735] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 743.598900] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.599153] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 743.599313] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.599464] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 743.599677] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 743.599839] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
743.600013] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 743.600195] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 743.600381] env[62346]: DEBUG nova.virt.hardware [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 743.601344] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc40df0-e601-4e9b-aa0b-e7b5cecbd12c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.611194] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8808fb06-b8d9-4852-9f32-b2b3b3cbaab4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.627168] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance VIF info [] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.632914] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Creating folder: Project (da4db2adf7dd413caf5d93fedf5af2dc). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 743.634587] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91788ae1-60b9-4fa6-a7b7-7f93b63008ca {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.650588] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Created folder: Project (da4db2adf7dd413caf5d93fedf5af2dc) in parent group-v953204. [ 743.650813] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Creating folder: Instances. Parent ref: group-v953248. 
{{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 743.651641] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d150e3b-98d0-4d2c-b1d2-b10ff665f47c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.653561] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.662645] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Created folder: Instances in parent group-v953248. [ 743.662837] env[62346]: DEBUG oslo.service.loopingcall [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.663039] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 743.663535] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25294f57-20cc-4951-bd10-a6c227323903 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.681575] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.681575] env[62346]: value = "task-4891635" [ 743.681575] env[62346]: _type = "Task" [ 743.681575] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.689888] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891635, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.997634] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.198039] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891635, 'name': CreateVM_Task, 'duration_secs': 0.287681} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.198332] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 744.199054] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.199476] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.199940] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 744.200319] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e07e9ecb-1ba9-44f3-93f8-c254987f5ff4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.206767] env[62346]: DEBUG oslo_vmware.api [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Waiting for the task: (returnval){ [ 744.206767] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d5ff9b-fc0b-645d-8e0f-463404bc3137" [ 744.206767] env[62346]: _type = "Task" [ 744.206767] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.220827] env[62346]: DEBUG oslo_vmware.api [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d5ff9b-fc0b-645d-8e0f-463404bc3137, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.717254] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.717617] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.717846] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.039322] env[62346]: DEBUG oslo_concurrency.lockutils [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "1157187b-7051-4921-bd95-9ef3e2d17104" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.316287] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "9684739f-82ae-4738-8d27-9d273b547ad6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.732225] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.732520] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.630047] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "a040a266-a77e-4ef4-ac34-df4781f2a757" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.415380] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.182940] env[62346]: DEBUG oslo_concurrency.lockutils [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "29eba656-6430-4009-8d24-c5a6f33bef95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.721482] env[62346]: DEBUG oslo_concurrency.lockutils [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.365443] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.582020] env[62346]: WARNING oslo_vmware.rw_handles [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 790.582020] env[62346]: ERROR oslo_vmware.rw_handles [ 790.582020] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 
tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 790.583033] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 790.583033] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Copying Virtual Disk [datastore2] vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/dca651e0-9631-46f4-92da-7ed7076ae230/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 790.583033] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48eaaf49-06e0-4099-ad40-f80098ff3b21 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.592373] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Waiting for the task: (returnval){ [ 790.592373] env[62346]: value = "task-4891636" [ 790.592373] env[62346]: _type = "Task" [ 790.592373] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.604118] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Task: {'id': task-4891636, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.106848] env[62346]: DEBUG oslo_vmware.exceptions [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 791.107240] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.107742] env[62346]: ERROR nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 791.107742] env[62346]: Faults: ['InvalidArgument'] [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Traceback (most recent call last): [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] yield resources [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self.driver.spawn(context, instance, image_meta, [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self._fetch_image_if_missing(context, vi) [ 791.107742] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] image_cache(vi, tmp_image_ds_loc) [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] vm_util.copy_virtual_disk( [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] session._wait_for_task(vmdk_copy_task) [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] return self.wait_for_task(task_ref) [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] return evt.wait() [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] result = hub.switch() [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 791.108341] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] return self.greenlet.switch() [ 791.108696] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 791.108696] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self.f(*self.args, **self.kw) [ 791.108696] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 791.108696] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] raise exceptions.translate_fault(task_info.error) [ 791.108696] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 791.108696] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Faults: ['InvalidArgument'] [ 791.108696] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] [ 791.108696] env[62346]: INFO nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Terminating instance [ 791.110938] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.110938] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquired lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.110938] env[62346]: DEBUG nova.network.neutron [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Building network info cache for instance {{(pid=62346) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 791.115993] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.115993] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.115993] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e6081f2-5d65-491c-82ac-e2f46e9b693d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.129439] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.129439] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 791.130396] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90c311b5-fd88-466e-b2b9-11ffa5494d26 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.139525] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Waiting for the task: (returnval){ [ 791.139525] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52b38d0a-07ba-bf20-45b1-0620f6ad0508" [ 791.139525] env[62346]: _type = "Task" [ 791.139525] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.154023] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52b38d0a-07ba-bf20-45b1-0620f6ad0508, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.174698] env[62346]: DEBUG nova.network.neutron [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 791.393753] env[62346]: DEBUG nova.network.neutron [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.407754] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Releasing lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.408253] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 791.408467] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 791.409888] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d775bf84-5b0e-47be-9e47-3073b1c5b4e8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.419174] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 791.419440] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7b1a6e6-3ff1-4d7a-8900-38e6465e2d53 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.456543] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 791.456830] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 791.456998] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Deleting the datastore file [datastore2] 9684739f-82ae-4738-8d27-9d273b547ad6 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.457308] env[62346]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46740679-5303-4b9f-9cbf-46c2c9e0af9b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.466924] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Waiting for the task: (returnval){ [ 791.466924] env[62346]: value = "task-4891638" [ 791.466924] env[62346]: _type = "Task" [ 791.466924] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.484608] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Task: {'id': task-4891638, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.652593] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 791.652941] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Creating directory with path [datastore2] vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.653495] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50508dd3-b20c-4dd9-8f0d-d534762413d8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.672116] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Created directory with path [datastore2] vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.672116] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Fetch image to [datastore2] vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 791.672116] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 791.672116] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dac553-1186-48f6-9fb2-1621c6d17ee6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.684103] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b43f3a2-2e65-43c3-ab06-f571408d1cee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.699047] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372f0d6f-2d04-436e-b7ce-8f7f7ebe68a5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.742989] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b410c8-be52-4182-9a89-dd78e4d7c989 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.751082] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5751adf9-f245-4d0e-bbfd-e7c12dea3188 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.774975] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 791.861247] env[62346]: DEBUG oslo_vmware.rw_handles [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 791.930220] env[62346]: DEBUG oslo_vmware.rw_handles [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 791.931156] env[62346]: DEBUG oslo_vmware.rw_handles [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 791.978369] env[62346]: DEBUG oslo_vmware.api [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Task: {'id': task-4891638, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.051585} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.978369] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 791.978496] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 791.978859] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 791.979086] env[62346]: INFO nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Took 0.57 seconds to destroy the instance on the hypervisor. [ 791.979338] env[62346]: DEBUG oslo.service.loopingcall [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 791.979547] env[62346]: DEBUG nova.compute.manager [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 791.983096] env[62346]: DEBUG nova.compute.claims [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 791.983096] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.983096] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.543347] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f508326-1dfb-4f65-b6cd-6aa172dc7650 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.553467] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c283cc26-7c98-45e0-83a2-75c520c27489 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.599745] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc5d924-1b5d-43ed-bb8a-5a1a3f1bc858 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.617581] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc05bcea-e7b6-45e0-a58b-e55142b41686 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.637302] env[62346]: DEBUG nova.compute.provider_tree [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.652105] env[62346]: DEBUG nova.scheduler.client.report [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 792.678362] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 
tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.695s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.680105] env[62346]: ERROR nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 792.680105] env[62346]: Faults: ['InvalidArgument'] [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Traceback (most recent call last): [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self.driver.spawn(context, instance, image_meta, [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self._fetch_image_if_missing(context, vi) [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] image_cache(vi, tmp_image_ds_loc) [ 792.680105] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] vm_util.copy_virtual_disk( [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] session._wait_for_task(vmdk_copy_task) [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] return self.wait_for_task(task_ref) [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] return evt.wait() [ 792.680460] env[62346]: ERROR nova.compute.manager 
[instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] result = hub.switch() [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] return self.greenlet.switch() [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 792.680460] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] self.f(*self.args, **self.kw) [ 792.680799] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 792.680799] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] raise exceptions.translate_fault(task_info.error) [ 792.680799] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 792.680799] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Faults: ['InvalidArgument'] [ 792.680799] env[62346]: ERROR nova.compute.manager [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] [ 792.682018] env[62346]: DEBUG nova.compute.utils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 792.685398] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Build of instance 9684739f-82ae-4738-8d27-9d273b547ad6 was re-scheduled: A specified parameter was not correct: fileType [ 792.685398] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 792.685398] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 792.685398] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.685398] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquired lock 
"refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.685633] env[62346]: DEBUG nova.network.neutron [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 792.734713] env[62346]: DEBUG nova.network.neutron [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.002648] env[62346]: DEBUG nova.network.neutron [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.016021] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Releasing lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.016283] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 793.016738] env[62346]: DEBUG nova.compute.manager [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 793.173723] env[62346]: INFO nova.scheduler.client.report [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Deleted allocations for instance 9684739f-82ae-4738-8d27-9d273b547ad6 [ 793.227242] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9744c202-0982-46de-bfd1-fd84d7cd2b11 tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "9684739f-82ae-4738-8d27-9d273b547ad6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 239.558s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.228490] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "9684739f-82ae-4738-8d27-9d273b547ad6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 41.912s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.228728] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "9684739f-82ae-4738-8d27-9d273b547ad6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.230084] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "9684739f-82ae-4738-8d27-9d273b547ad6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.230285] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "9684739f-82ae-4738-8d27-9d273b547ad6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.232804] env[62346]: INFO nova.compute.manager [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Terminating instance [ 793.234739] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquiring lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.234905] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Acquired lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" 
{{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.235088] env[62346]: DEBUG nova.network.neutron [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 793.245186] env[62346]: DEBUG nova.compute.manager [None req-70719889-ef6e-4113-92cb-66bc2c91020a tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] [instance: 3dbb3789-e23a-4810-801e-dbe4f76d97fb] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.280983] env[62346]: DEBUG nova.compute.manager [None req-70719889-ef6e-4113-92cb-66bc2c91020a tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] [instance: 3dbb3789-e23a-4810-801e-dbe4f76d97fb] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.291555] env[62346]: DEBUG nova.network.neutron [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.312682] env[62346]: DEBUG oslo_concurrency.lockutils [None req-70719889-ef6e-4113-92cb-66bc2c91020a tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] Lock "3dbb3789-e23a-4810-801e-dbe4f76d97fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.262s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.334712] env[62346]: DEBUG nova.compute.manager [None req-cb7ed128-cc69-4a7c-b2f8-a2a82fafdc3f tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] [instance: f0a463df-07ca-48b0-9254-a1a746d3f5b1] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.371575] env[62346]: DEBUG nova.compute.manager [None req-cb7ed128-cc69-4a7c-b2f8-a2a82fafdc3f tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] [instance: f0a463df-07ca-48b0-9254-a1a746d3f5b1] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.412696] env[62346]: DEBUG oslo_concurrency.lockutils [None req-cb7ed128-cc69-4a7c-b2f8-a2a82fafdc3f tempest-ServersAdminTestJSON-1500978182 tempest-ServersAdminTestJSON-1500978182-project-member] Lock "f0a463df-07ca-48b0-9254-a1a746d3f5b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 215.832s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.423852] env[62346]: DEBUG nova.compute.manager [None req-c856d94d-a185-45bb-8ef5-c69789df8094 tempest-InstanceActionsV221TestJSON-2087366002 tempest-InstanceActionsV221TestJSON-2087366002-project-member] [instance: ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.463593] env[62346]: DEBUG nova.compute.manager [None req-c856d94d-a185-45bb-8ef5-c69789df8094 tempest-InstanceActionsV221TestJSON-2087366002 tempest-InstanceActionsV221TestJSON-2087366002-project-member] [instance: ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.499648] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c856d94d-a185-45bb-8ef5-c69789df8094 tempest-InstanceActionsV221TestJSON-2087366002 tempest-InstanceActionsV221TestJSON-2087366002-project-member] Lock "ff9e8fca-e137-4c4a-aec5-bacdbd1e04f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.875s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.538625] env[62346]: DEBUG nova.compute.manager [None req-adb57c27-05e0-4c68-acaf-4ec6054ddb86 tempest-ServersWithSpecificFlavorTestJSON-914987821 tempest-ServersWithSpecificFlavorTestJSON-914987821-project-member] [instance: 84536893-aa88-4ac3-8340-509cb9d88088] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.569723] env[62346]: DEBUG nova.compute.manager [None req-adb57c27-05e0-4c68-acaf-4ec6054ddb86 tempest-ServersWithSpecificFlavorTestJSON-914987821 tempest-ServersWithSpecificFlavorTestJSON-914987821-project-member] [instance: 84536893-aa88-4ac3-8340-509cb9d88088] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.602731] env[62346]: DEBUG nova.network.neutron [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.617925] env[62346]: DEBUG oslo_concurrency.lockutils [None req-adb57c27-05e0-4c68-acaf-4ec6054ddb86 tempest-ServersWithSpecificFlavorTestJSON-914987821 tempest-ServersWithSpecificFlavorTestJSON-914987821-project-member] Lock "84536893-aa88-4ac3-8340-509cb9d88088" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.312s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.618474] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Releasing lock "refresh_cache-9684739f-82ae-4738-8d27-9d273b547ad6" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.619340] env[62346]: DEBUG nova.compute.manager [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 793.619340] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 793.621582] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99b17c71-6f28-4e8a-82e5-e46913eeb654 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.632735] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861940f6-2cc7-4c24-9e11-6b8dee6fc3d1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.645813] env[62346]: DEBUG nova.compute.manager [None req-0e170052-420f-4eb3-abeb-f6e480838b41 tempest-ServerPasswordTestJSON-555314818 tempest-ServerPasswordTestJSON-555314818-project-member] [instance: f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.677298] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9684739f-82ae-4738-8d27-9d273b547ad6 could not be found. [ 793.677298] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 793.677462] env[62346]: INFO nova.compute.manager [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Took 0.06 seconds to destroy the instance on the hypervisor. [ 793.677733] env[62346]: DEBUG oslo.service.loopingcall [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 793.677972] env[62346]: DEBUG nova.compute.manager [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 793.678622] env[62346]: DEBUG nova.network.neutron [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 793.687602] env[62346]: DEBUG nova.compute.manager [None req-0e170052-420f-4eb3-abeb-f6e480838b41 tempest-ServerPasswordTestJSON-555314818 tempest-ServerPasswordTestJSON-555314818-project-member] [instance: f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.716153] env[62346]: DEBUG nova.network.neutron [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.720891] env[62346]: DEBUG oslo_concurrency.lockutils [None req-0e170052-420f-4eb3-abeb-f6e480838b41 tempest-ServerPasswordTestJSON-555314818 tempest-ServerPasswordTestJSON-555314818-project-member] Lock "f1604dfe-1368-48d0-b3c3-c0dd9c24e3c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.525s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.729916] env[62346]: DEBUG nova.network.neutron [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.738051] env[62346]: DEBUG nova.compute.manager [None req-f5a5b1a5-b7de-4867-8706-4c52f2e81245 tempest-ServersV294TestFqdnHostnames-229906635 tempest-ServersV294TestFqdnHostnames-229906635-project-member] [instance: daf47742-054a-496d-b754-3a2687ebe973] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.750198] env[62346]: INFO nova.compute.manager [-] [instance: 9684739f-82ae-4738-8d27-9d273b547ad6] Took 0.07 seconds to deallocate network for instance. [ 793.778219] env[62346]: DEBUG nova.compute.manager [None req-f5a5b1a5-b7de-4867-8706-4c52f2e81245 tempest-ServersV294TestFqdnHostnames-229906635 tempest-ServersV294TestFqdnHostnames-229906635-project-member] [instance: daf47742-054a-496d-b754-3a2687ebe973] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 793.807045] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f5a5b1a5-b7de-4867-8706-4c52f2e81245 tempest-ServersV294TestFqdnHostnames-229906635 tempest-ServersV294TestFqdnHostnames-229906635-project-member] Lock "daf47742-054a-496d-b754-3a2687ebe973" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.038s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.824342] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 793.898019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.898019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.898019] env[62346]: INFO nova.compute.claims [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.908518] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5d032c04-b08f-4ed6-a62e-f405355f723c tempest-ServerShowV257Test-1617424148 tempest-ServerShowV257Test-1617424148-project-member] Lock "9684739f-82ae-4738-8d27-9d273b547ad6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.680s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.224160] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.224513] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 794.243584] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] There are 0 instances to clean {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 794.244135] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.244494] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances with incomplete migration {{(pid=62346) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 794.260491] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.364611] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66619423-e51b-490e-8881-36bb960d8726 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.374025] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ce299b-3330-41a2-bcfc-e6a8d359f036 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.411964] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f3c0bc-7181-4c1c-a15a-f582cf6c7af4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.419411] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3a58b9-3db1-426e-b947-fdd265a2c11c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.435045] env[62346]: DEBUG nova.compute.provider_tree [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.449488] env[62346]: DEBUG nova.scheduler.client.report [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.477760] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.582s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.480090] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 794.573059] env[62346]: DEBUG nova.compute.utils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 794.574475] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Allocating IP information in the background. 
{{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 794.574670] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 794.591719] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 794.718937] env[62346]: DEBUG nova.policy [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ec111f0142b464db501ecfa59d9e511', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ea2f35bc89d45a4ade06ca64f5249f7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 794.722879] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 794.748020] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=<?>,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T11:25:33Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 794.748020] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 794.748020] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.748332] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 794.748332] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.748332] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 794.749014] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 794.752088] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
794.752088] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 794.752202] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 794.752354] env[62346]: DEBUG nova.virt.hardware [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.753982] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16271877-fa8b-435a-9e51-dfecc8400bf8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.764469] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9312112-95fc-4045-9f0a-2b9f704b4eef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.847317] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.847317] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.266331] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.285852] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.285852] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.285852] env[62346]: DEBUG 
oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.285852] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 795.287179] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a68ca1-f4f8-429a-b2fe-bb0a0b894b17 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.302117] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270eb237-b286-4b46-895e-2a8216a2d9a1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.323515] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ce4201-255c-46c1-ad6d-4e985b112db8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.338841] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2f0cd7-7b2c-4e18-8dbe-0659277cc345 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.370988] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180565MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 795.371191] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.371396] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.473605] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473605] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473605] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473605] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473758] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473758] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473758] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473758] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473869] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.473869] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 795.493035] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2aa479bb-71e8-4d21-9af8-d2af9d284c14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.512671] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance cc78290c-3615-4e59-b0f2-95d967b07569 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.528442] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d87291e9-b698-4e6c-a265-55c00b863ac1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.543654] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 682b49a4-4901-4c62-9e5a-cf03047e1cb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.562110] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 865c8799-4a94-41af-921c-c4206331ba81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.593692] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f0d00a15-7869-4b1a-bf6f-a7f5e1c88138 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.608513] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e2bfd80f-9e03-4dd1-bd28-fb33b54463b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.628521] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6a1f6a2-05be-43d5-a2c3-36bc5a878434 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.652140] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 55ab9511-9e85-426e-b6bc-829f88c534f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.665548] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca26d6e1-b841-490c-bfd6-33351926e630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.681908] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.705644] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.716020] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Successfully created port: b0f5360f-a265-46be-b0f8-8d7d51ef1641 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.724794] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 795.724794] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 795.724794] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '11', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_1dda2d64ca9c4dca96b9ed840a531345': '1', 'io_workload': '10', 'num_proj_57259b412e4744f28b85130103f6018f': '1', 'num_proj_67d7b1e9bec14d2f8be2b7d5f9da1973': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1', 'num_proj_da4db2adf7dd413caf5d93fedf5af2dc': '1', 'num_task_spawning': '1', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 796.113464] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "4d8eeb53-06e4-423f-8719-10f5283175b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.113464] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.164007] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eb07f9-bd61-486b-8599-66a3d61800e0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.172861] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292aef5c-70db-42cb-a643-6e5481350365 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.211452] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79782884-f722-449f-8541-e04b6153a8d4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.224902] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c2de79-6a0d-498a-8c8a-756bc134bb0d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.244509] env[62346]: DEBUG 
nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.262352] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 796.294668] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 796.294819] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.923s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.953485] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "3bca1346-07e6-4514-8ea0-5783b9640849" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.228590] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dcc006c1-13d0-4ff0-947c-14609f9ed98b tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] Acquiring lock "9feb7773-2b61-464f-878b-0a8ee21a22c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.228847] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dcc006c1-13d0-4ff0-947c-14609f9ed98b tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] Lock "9feb7773-2b61-464f-878b-0a8ee21a22c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.248843] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.347426] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 
3bca1346-07e6-4514-8ea0-5783b9640849] Successfully updated port: b0f5360f-a265-46be-b0f8-8d7d51ef1641 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 797.369330] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.369330] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquired lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.369330] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 797.460327] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.767016] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Updating instance_info_cache with network_info: [{"id": "b0f5360f-a265-46be-b0f8-8d7d51ef1641", "address": "fa:16:3e:bb:2a:61", "network": {"id": "da698b61-bd74-458d-8649-02ac47e2d443", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1399162256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ea2f35bc89d45a4ade06ca64f5249f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0f5360f-a2", "ovs_interfaceid": "b0f5360f-a265-46be-b0f8-8d7d51ef1641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.781207] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Releasing lock 
"refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.781511] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance network_info: |[{"id": "b0f5360f-a265-46be-b0f8-8d7d51ef1641", "address": "fa:16:3e:bb:2a:61", "network": {"id": "da698b61-bd74-458d-8649-02ac47e2d443", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1399162256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ea2f35bc89d45a4ade06ca64f5249f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0f5360f-a2", "ovs_interfaceid": "b0f5360f-a265-46be-b0f8-8d7d51ef1641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 797.781916] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:2a:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86b8f7fc-c105-4bcb-a4ec-c363ed38b17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0f5360f-a265-46be-b0f8-8d7d51ef1641', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.789786] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Creating folder: Project (9ea2f35bc89d45a4ade06ca64f5249f7). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 797.790404] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5408881d-92f2-47d6-8793-014fed55e856 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.803233] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Created folder: Project (9ea2f35bc89d45a4ade06ca64f5249f7) in parent group-v953204. [ 797.803233] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Creating folder: Instances. 
Parent ref: group-v953251. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 797.803233] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecefdf3c-c9b7-4bf4-95f6-1f73f5e18576 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.815520] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Created folder: Instances in parent group-v953251. [ 797.816024] env[62346]: DEBUG oslo.service.loopingcall [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.816024] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 797.816181] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c32d1f2f-8e18-41c2-8756-fcd984d4ddee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.838979] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.838979] env[62346]: value = "task-4891641" [ 797.838979] env[62346]: _type = "Task" [ 797.838979] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.849352] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891641, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.223124] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.223124] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 798.223124] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 798.262287] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.262384] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.262566] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.263056] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.263430] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.263643] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.263808] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.263963] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.264135] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.264299] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 798.264737] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 798.265491] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.354445] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891641, 'name': CreateVM_Task, 'duration_secs': 0.374853} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.354988] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 798.356030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.356140] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.356658] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 798.356870] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c2fab17-2bee-40bf-bb49-db3106641451 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.363486] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Waiting for the task: (returnval){ [ 798.363486] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ee2da5-8340-1b62-fb48-28182ddbe692" [ 798.363486] env[62346]: _type = "Task" [ 798.363486] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.373543] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ee2da5-8340-1b62-fb48-28182ddbe692, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.675248] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "c3c4fb8b-3897-4c85-b40c-710dc4d1fb16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.675492] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "c3c4fb8b-3897-4c85-b40c-710dc4d1fb16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.712613] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "92241377-06e3-41e1-bae5-718f1ae5908b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.713143] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "92241377-06e3-41e1-bae5-718f1ae5908b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.876665] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.877191] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.877553] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.899037] env[62346]: DEBUG nova.compute.manager [req-17995d37-f2ec-449e-837a-fc3c93dea017 req-f12e95cc-cc1f-4d42-97d5-06e0c981303e service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Received event network-vif-plugged-b0f5360f-a265-46be-b0f8-8d7d51ef1641 {{(pid=62346) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 798.899180] env[62346]: DEBUG oslo_concurrency.lockutils [req-17995d37-f2ec-449e-837a-fc3c93dea017 req-f12e95cc-cc1f-4d42-97d5-06e0c981303e service nova] Acquiring lock "3bca1346-07e6-4514-8ea0-5783b9640849-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.899448] env[62346]: DEBUG oslo_concurrency.lockutils [req-17995d37-f2ec-449e-837a-fc3c93dea017 req-f12e95cc-cc1f-4d42-97d5-06e0c981303e service nova] Lock "3bca1346-07e6-4514-8ea0-5783b9640849-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.899551] env[62346]: DEBUG oslo_concurrency.lockutils [req-17995d37-f2ec-449e-837a-fc3c93dea017 req-f12e95cc-cc1f-4d42-97d5-06e0c981303e service nova] Lock "3bca1346-07e6-4514-8ea0-5783b9640849-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.899734] env[62346]: DEBUG nova.compute.manager [req-17995d37-f2ec-449e-837a-fc3c93dea017 req-f12e95cc-cc1f-4d42-97d5-06e0c981303e service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] No waiting events found dispatching network-vif-plugged-b0f5360f-a265-46be-b0f8-8d7d51ef1641 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 798.899901] env[62346]: WARNING nova.compute.manager [req-17995d37-f2ec-449e-837a-fc3c93dea017 req-f12e95cc-cc1f-4d42-97d5-06e0c981303e service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Received unexpected event network-vif-plugged-b0f5360f-a265-46be-b0f8-8d7d51ef1641 for instance with vm_state building and task_state deleting. [ 800.220734] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.221266] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.221396] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.221498] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 800.424146] env[62346]: DEBUG oslo_concurrency.lockutils [None req-523e7672-eed8-4dde-8e5d-6b85c3289a9f tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] Acquiring lock "69db1596-7767-40cc-9872-45574c4f681e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.424666] env[62346]: DEBUG oslo_concurrency.lockutils [None req-523e7672-eed8-4dde-8e5d-6b85c3289a9f tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] Lock "69db1596-7767-40cc-9872-45574c4f681e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.221070] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.221070] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.943081] env[62346]: DEBUG nova.compute.manager [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Received event network-changed-b0f5360f-a265-46be-b0f8-8d7d51ef1641 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 801.943081] env[62346]: DEBUG nova.compute.manager [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Refreshing instance network info cache due to event network-changed-b0f5360f-a265-46be-b0f8-8d7d51ef1641. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 801.943081] env[62346]: DEBUG oslo_concurrency.lockutils [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] Acquiring lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.943081] env[62346]: DEBUG oslo_concurrency.lockutils [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] Acquired lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.943081] env[62346]: DEBUG nova.network.neutron [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Refreshing network info cache for port b0f5360f-a265-46be-b0f8-8d7d51ef1641 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 802.812924] env[62346]: DEBUG nova.network.neutron [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Updated VIF entry in instance network info cache for port b0f5360f-a265-46be-b0f8-8d7d51ef1641. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 802.813417] env[62346]: DEBUG nova.network.neutron [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Updating instance_info_cache with network_info: [{"id": "b0f5360f-a265-46be-b0f8-8d7d51ef1641", "address": "fa:16:3e:bb:2a:61", "network": {"id": "da698b61-bd74-458d-8649-02ac47e2d443", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1399162256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ea2f35bc89d45a4ade06ca64f5249f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86b8f7fc-c105-4bcb-a4ec-c363ed38b17a", "external-id": "nsx-vlan-transportzone-830", "segmentation_id": 830, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0f5360f-a2", "ovs_interfaceid": "b0f5360f-a265-46be-b0f8-8d7d51ef1641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.828138] env[62346]: DEBUG oslo_concurrency.lockutils [req-6ba13a5f-7e3d-48ec-b909-60d961b6b5f5 req-dd2d12fd-3b22-493d-904e-bd1a771d531b service nova] Releasing lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.559662] env[62346]: DEBUG oslo_concurrency.lockutils [None req-94660edd-3bc7-4245-9207-432940d77db4 tempest-ServerMetadataTestJSON-951334850 tempest-ServerMetadataTestJSON-951334850-project-member] Acquiring 
lock "0889843e-cf4a-4ab3-9702-a2599fac93ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.559662] env[62346]: DEBUG oslo_concurrency.lockutils [None req-94660edd-3bc7-4245-9207-432940d77db4 tempest-ServerMetadataTestJSON-951334850 tempest-ServerMetadataTestJSON-951334850-project-member] Lock "0889843e-cf4a-4ab3-9702-a2599fac93ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.889864] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8f69016-ad1f-4512-981b-91171ed5c403 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "6f96e666-ac1d-48a1-b663-86f1bb9b64d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.890424] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8f69016-ad1f-4512-981b-91171ed5c403 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "6f96e666-ac1d-48a1-b663-86f1bb9b64d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.838175] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d97360ba-a209-4894-afb7-36665ef1afde tempest-ServersTestFqdnHostnames-80151842 tempest-ServersTestFqdnHostnames-80151842-project-member] Acquiring lock "823c0133-92a4-4f86-9df5-7fdf57ccc9f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.838570] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d97360ba-a209-4894-afb7-36665ef1afde tempest-ServersTestFqdnHostnames-80151842 tempest-ServersTestFqdnHostnames-80151842-project-member] Lock "823c0133-92a4-4f86-9df5-7fdf57ccc9f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.346782] env[62346]: DEBUG oslo_concurrency.lockutils [None req-688d0e48-23e1-46c3-a79b-f3bd2d67257b tempest-ServersTestJSON-1916021883 tempest-ServersTestJSON-1916021883-project-member] Acquiring lock "7f1f9951-6292-4b19-8567-16495b9f90cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.346782] env[62346]: DEBUG oslo_concurrency.lockutils [None req-688d0e48-23e1-46c3-a79b-f3bd2d67257b tempest-ServersTestJSON-1916021883 tempest-ServersTestJSON-1916021883-project-member] Lock "7f1f9951-6292-4b19-8567-16495b9f90cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
825.036860] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1346ee98-66e6-4967-bfb9-ef5923f12753 tempest-ServerActionsTestOtherB-773440793 tempest-ServerActionsTestOtherB-773440793-project-member] Acquiring lock "1ee08a27-8aa5-49b8-abf7-7d69dee25a4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.036860] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1346ee98-66e6-4967-bfb9-ef5923f12753 tempest-ServerActionsTestOtherB-773440793 tempest-ServerActionsTestOtherB-773440793-project-member] Lock "1ee08a27-8aa5-49b8-abf7-7d69dee25a4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.038263] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3c672a6b-7f00-4ad4-98c8-ccf7ada2f880 tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] Acquiring lock "4b4ad569-a890-4ade-aa72-313f8d87d430" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.039029] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3c672a6b-7f00-4ad4-98c8-ccf7ada2f880 tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] Lock "4b4ad569-a890-4ade-aa72-313f8d87d430" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.195821] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9c7bb4b6-829f-4df9-a3a8-aef957e7458f tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] Acquiring lock "d8fa321b-9703-4b46-b2c3-5889b03a9116" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.196135] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9c7bb4b6-829f-4df9-a3a8-aef957e7458f tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] Lock "d8fa321b-9703-4b46-b2c3-5889b03a9116" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.215042] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07ffd870-8465-4f81-93eb-e5028eb82d69 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Acquiring lock "40b09c7e-1206-4b4c-a1a6-e556a5af2743" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.215490] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07ffd870-8465-4f81-93eb-e5028eb82d69 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Lock "40b09c7e-1206-4b4c-a1a6-e556a5af2743" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.489231] env[62346]: WARNING oslo_vmware.rw_handles [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 840.489231] env[62346]: ERROR oslo_vmware.rw_handles [ 840.489231] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 840.490783] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 840.491084] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Copying Virtual Disk [datastore2] vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/6c8d6be5-0539-4861-b846-a1e7566be280/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 840.491405] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af9735e4-b7aa-4da3-a5ae-4739c685396a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.504144] env[62346]: DEBUG oslo_vmware.api [None 
req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Waiting for the task: (returnval){ [ 840.504144] env[62346]: value = "task-4891642" [ 840.504144] env[62346]: _type = "Task" [ 840.504144] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.513935] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Task: {'id': task-4891642, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.014980] env[62346]: DEBUG oslo_vmware.exceptions [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 841.015301] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.015871] env[62346]: ERROR nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 841.015871] env[62346]: Faults: ['InvalidArgument'] [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Traceback (most recent call last): [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] yield resources [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self.driver.spawn(context, instance, image_meta, [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self._vmops.spawn(context, instance, image_meta, injected_files, [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self._fetch_image_if_missing(context, vi) [ 841.015871] env[62346]: ERROR nova.compute.manager [instance: 
f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] image_cache(vi, tmp_image_ds_loc) [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] vm_util.copy_virtual_disk( [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] session._wait_for_task(vmdk_copy_task) [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] return self.wait_for_task(task_ref) [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] return evt.wait() [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] result = hub.switch() [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 841.016279] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] return self.greenlet.switch() [ 841.016659] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 841.016659] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self.f(*self.args, **self.kw) [ 841.016659] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 841.016659] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] raise exceptions.translate_fault(task_info.error) [ 841.016659] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 841.016659] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Faults: ['InvalidArgument'] [ 841.016659] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] [ 841.016659] env[62346]: INFO nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: 
f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Terminating instance [ 841.017815] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.018028] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.018278] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcbeeb6e-ce5d-429c-a051-8dc858227bdf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.020747] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 841.020989] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 841.021769] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7ac0b0-edf7-47ef-84bf-f4e179b24620 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.029817] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 841.030089] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-393472dd-ebd0-4158-8218-26248d6e48a8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.032603] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.032781] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 841.033807] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e753aeb7-be90-478a-a252-351ef253b9a6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.039650] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Waiting for the task: (returnval){ [ 841.039650] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523b60c7-f77f-aca4-5d2b-fa9f7b859fad" [ 841.039650] env[62346]: _type = "Task" [ 841.039650] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.047903] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523b60c7-f77f-aca4-5d2b-fa9f7b859fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.119898] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 841.120180] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 841.120363] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Deleting the datastore file [datastore2] f71cb62c-8dc2-4dcc-9da4-2f26c0960531 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.120639] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d81fb41c-60d8-4a05-bff9-3db4bbab6df5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.127409] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Waiting for the task: (returnval){ [ 841.127409] env[62346]: value = "task-4891644" [ 841.127409] env[62346]: _type = "Task" [ 841.127409] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.136477] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Task: {'id': task-4891644, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.550457] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 841.550763] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Creating directory with path [datastore2] vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.550955] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-737cdfd4-b3f8-4413-82e2-bf6e4e8b28be {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.563905] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Created directory with path [datastore2] vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.564095] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Fetch image to [datastore2] vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 841.564273] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 841.565051] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d73c1b4-4245-4c1f-8b3e-6675ea2d808f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.572764] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2aed575-2b76-4dc8-af4e-5c6998eb23ce {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.582509] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6324aad-6cb7-424b-94cd-04bd7042aa2b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.617513] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410b16ca-0a03-4a42-a538-9d375c3e5223 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.622192] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8454f8f7-44a0-45db-acbf-c47f6ae98739 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.636565] env[62346]: DEBUG oslo_vmware.api [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Task: {'id': task-4891644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087674} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.636837] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 841.637027] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 841.637203] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 841.637374] env[62346]: INFO nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Took 0.62 seconds to destroy the instance on the hypervisor. 
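The SearchDatastore_Task and DeleteDatastoreFile_Task entries above are produced by oslo.vmware's task polling: wait_for_task re-reads the task state on an interval, logging "progress is N%" on each pass, and reports duration_secs once the task reaches a terminal state (task-4891644 completed in 0.087674s here). A minimal sketch of that loop, assuming a vCenter-style task object exposing .state, .progress, and .error; fetch_task_info, the interval, and the timeout are illustrative stand-ins, not oslo.vmware's actual internals:

```python
import time


class TaskFailed(Exception):
    """Raised when a vCenter task ends in the 'error' state."""


def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter task until it succeeds, fails, or times out.

    fetch_task_info: zero-arg callable returning an object with
    .state ('queued' | 'running' | 'success' | 'error'),
    .progress (int percent or None), and .error (fault or None).
    Illustrative stand-in for oslo.vmware's _poll_task loop.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        if info.state == "success":
            # oslo.vmware logs "completed successfully" with duration_secs.
            return time.monotonic() - start
        if info.state == "error":
            # Terminal failure: surface the fault to the caller, the way
            # _poll_task raises via exceptions.translate_fault().
            raise TaskFailed(info.error)
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in %ss" % timeout)
        # The "Task: {...} progress is N%." lines correspond to this branch.
        time.sleep(poll_interval)
```

The error branch is how the VimFaultException in the traceback further down surfaces: _poll_task sees the task in the error state and raises the translated fault back through wait_for_task into the spawn path.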
[ 841.639627] env[62346]: DEBUG nova.compute.claims [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 841.639802] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.640057] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.648208] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 841.800816] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 841.857616] env[62346]: DEBUG nova.scheduler.client.report [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Refreshing inventories for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 841.862339] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 841.862557] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 841.879171] env[62346]: DEBUG nova.scheduler.client.report [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Updating ProviderTree inventory for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 841.879584] env[62346]: DEBUG nova.compute.provider_tree [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Updating inventory in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.892436] env[62346]: DEBUG nova.scheduler.client.report [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Refreshing aggregate associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, aggregates: None {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 841.910529] env[62346]: DEBUG nova.scheduler.client.report [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Refreshing trait associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 842.062202] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ba18bcbd-7386-486c-9d9a-f2bd47db6909 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "c3c27528-211f-4c7b-ad25-fb6f2d8c7faf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.062449] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ba18bcbd-7386-486c-9d9a-f2bd47db6909 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "c3c27528-211f-4c7b-ad25-fb6f2d8c7faf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.261045] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73354581-4481-47aa-9e4e-ef27436837ff {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.269214] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2fce8b-6026-436b-b41c-0175955654c6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.301324] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18158b07-a5c7-4c58-8ad4-4f703e70f1df {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.309364] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4870d8-3e73-41a8-8e5a-50e42ea8dbe5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.325319] env[62346]: DEBUG nova.compute.provider_tree [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.335402] env[62346]: DEBUG nova.scheduler.client.report [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 842.352836] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.713s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.354444] env[62346]: ERROR nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 842.354444] env[62346]: Faults: ['InvalidArgument'] [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Traceback (most recent call last): [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self.driver.spawn(context, instance, image_meta, [ 842.354444] env[62346]: 
ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self._vmops.spawn(context, instance, image_meta, injected_files, [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self._fetch_image_if_missing(context, vi) [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] image_cache(vi, tmp_image_ds_loc) [ 842.354444] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] vm_util.copy_virtual_disk( [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] session._wait_for_task(vmdk_copy_task) [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] return self.wait_for_task(task_ref) [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] return evt.wait() [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] result = hub.switch() [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] return self.greenlet.switch() [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 842.354909] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] self.f(*self.args, **self.kw) [ 842.355384] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 842.355384] env[62346]: ERROR nova.compute.manager [instance: 
f71cb62c-8dc2-4dcc-9da4-2f26c0960531] raise exceptions.translate_fault(task_info.error) [ 842.355384] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 842.355384] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Faults: ['InvalidArgument'] [ 842.355384] env[62346]: ERROR nova.compute.manager [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] [ 842.355384] env[62346]: DEBUG nova.compute.utils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 842.356667] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Build of instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 was re-scheduled: A specified parameter was not correct: fileType [ 842.356667] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 842.357052] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 842.357232] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 842.357384] env[62346]: DEBUG nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 842.357545] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 842.974290] env[62346]: DEBUG nova.network.neutron [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.990881] env[62346]: INFO nova.compute.manager [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Took 0.63 seconds to deallocate network for instance. [ 843.111075] env[62346]: INFO nova.scheduler.client.report [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Deleted allocations for instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 [ 843.136148] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7fe06bbc-7b62-4a02-b0e0-a0e0584495bf tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 297.822s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.137605] env[62346]: DEBUG oslo_concurrency.lockutils [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 99.951s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.138160] env[62346]: DEBUG oslo_concurrency.lockutils [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.140027] env[62346]: DEBUG oslo_concurrency.lockutils [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.140027] env[62346]: DEBUG oslo_concurrency.lockutils [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.141037] env[62346]: INFO nova.compute.manager [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Terminating instance [ 843.146916] env[62346]: DEBUG nova.compute.manager [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 843.147166] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.148031] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36b7fc35-9d95-418a-a781-973e4d887275 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.157096] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c1cb3b-c7be-48fc-ac93-6ef6928a9e3f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.169450] env[62346]: DEBUG nova.compute.manager [None req-b7f44b87-01b8-4426-a290-0ce6f1918436 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 6ec381e8-762e-4136-863b-2b1a566abb9a] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.194502] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f71cb62c-8dc2-4dcc-9da4-2f26c0960531 could not be found. 
[ 843.194826] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 843.195065] env[62346]: INFO nova.compute.manager [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Took 0.05 seconds to destroy the instance on the hypervisor. [ 843.195352] env[62346]: DEBUG oslo.service.loopingcall [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.196082] env[62346]: DEBUG nova.compute.manager [-] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 843.196082] env[62346]: DEBUG nova.network.neutron [-] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.206474] env[62346]: DEBUG nova.compute.manager [None req-b7f44b87-01b8-4426-a290-0ce6f1918436 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 6ec381e8-762e-4136-863b-2b1a566abb9a] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.228540] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b7f44b87-01b8-4426-a290-0ce6f1918436 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "6ec381e8-762e-4136-863b-2b1a566abb9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 254.711s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.239957] env[62346]: DEBUG nova.compute.manager [None req-f9776e03-d554-4643-9056-c2f9cec39b1e tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 64da10a6-4063-4288-88ab-ae97b8c1fd88] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.276991] env[62346]: DEBUG nova.compute.manager [None req-f9776e03-d554-4643-9056-c2f9cec39b1e tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 64da10a6-4063-4288-88ab-ae97b8c1fd88] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.302153] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f9776e03-d554-4643-9056-c2f9cec39b1e tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "64da10a6-4063-4288-88ab-ae97b8c1fd88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 252.577s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.314684] env[62346]: DEBUG nova.compute.manager [None req-efa89789-c7d6-460d-bd4d-00fd225dbf4d tempest-FloatingIPsAssociationTestJSON-675611170 tempest-FloatingIPsAssociationTestJSON-675611170-project-member] [instance: 35d268f6-0573-4f9b-85ac-09359c56ef8d] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.327782] env[62346]: DEBUG nova.network.neutron [-] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.338514] env[62346]: INFO nova.compute.manager [-] [instance: f71cb62c-8dc2-4dcc-9da4-2f26c0960531] Took 0.14 seconds to deallocate network for instance. [ 843.345263] env[62346]: DEBUG nova.compute.manager [None req-efa89789-c7d6-460d-bd4d-00fd225dbf4d tempest-FloatingIPsAssociationTestJSON-675611170 tempest-FloatingIPsAssociationTestJSON-675611170-project-member] [instance: 35d268f6-0573-4f9b-85ac-09359c56ef8d] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.368846] env[62346]: DEBUG oslo_concurrency.lockutils [None req-efa89789-c7d6-460d-bd4d-00fd225dbf4d tempest-FloatingIPsAssociationTestJSON-675611170 tempest-FloatingIPsAssociationTestJSON-675611170-project-member] Lock "35d268f6-0573-4f9b-85ac-09359c56ef8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 249.550s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.393039] env[62346]: DEBUG nova.compute.manager [None req-124b6992-e84d-4e25-ae7d-dc5cc6bb9eb8 tempest-AttachInterfacesV270Test-1166619584 tempest-AttachInterfacesV270Test-1166619584-project-member] [instance: c48b9d0d-37c3-47bb-9f9a-4055eb607c93] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.421938] env[62346]: DEBUG nova.compute.manager [None req-124b6992-e84d-4e25-ae7d-dc5cc6bb9eb8 tempest-AttachInterfacesV270Test-1166619584 tempest-AttachInterfacesV270Test-1166619584-project-member] [instance: c48b9d0d-37c3-47bb-9f9a-4055eb607c93] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.441970] env[62346]: DEBUG oslo_concurrency.lockutils [None req-124b6992-e84d-4e25-ae7d-dc5cc6bb9eb8 tempest-AttachInterfacesV270Test-1166619584 tempest-AttachInterfacesV270Test-1166619584-project-member] Lock "c48b9d0d-37c3-47bb-9f9a-4055eb607c93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 249.290s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.461156] env[62346]: DEBUG nova.compute.manager [None req-3947c1a3-1ea7-4423-b658-41f93041f8e6 tempest-AttachInterfacesUnderV243Test-819989224 tempest-AttachInterfacesUnderV243Test-819989224-project-member] [instance: 2aa479bb-71e8-4d21-9af8-d2af9d284c14] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.473980] env[62346]: DEBUG oslo_concurrency.lockutils [None req-19e7e39d-b956-44eb-8b8c-6ee8c6201d38 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "f71cb62c-8dc2-4dcc-9da4-2f26c0960531" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.336s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.488446] env[62346]: DEBUG nova.compute.manager [None req-3947c1a3-1ea7-4423-b658-41f93041f8e6 tempest-AttachInterfacesUnderV243Test-819989224 tempest-AttachInterfacesUnderV243Test-819989224-project-member] [instance: 2aa479bb-71e8-4d21-9af8-d2af9d284c14] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.508813] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3947c1a3-1ea7-4423-b658-41f93041f8e6 tempest-AttachInterfacesUnderV243Test-819989224 tempest-AttachInterfacesUnderV243Test-819989224-project-member] Lock "2aa479bb-71e8-4d21-9af8-d2af9d284c14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.294s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.518851] env[62346]: DEBUG nova.compute.manager [None req-b7028a10-40f8-482d-86df-f73db6033fe7 tempest-ServersTestBootFromVolume-936315030 tempest-ServersTestBootFromVolume-936315030-project-member] [instance: cc78290c-3615-4e59-b0f2-95d967b07569] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.565804] env[62346]: DEBUG nova.compute.manager [None req-b7028a10-40f8-482d-86df-f73db6033fe7 tempest-ServersTestBootFromVolume-936315030 tempest-ServersTestBootFromVolume-936315030-project-member] [instance: cc78290c-3615-4e59-b0f2-95d967b07569] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.590309] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b7028a10-40f8-482d-86df-f73db6033fe7 tempest-ServersTestBootFromVolume-936315030 tempest-ServersTestBootFromVolume-936315030-project-member] Lock "cc78290c-3615-4e59-b0f2-95d967b07569" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.780s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.629065] env[62346]: DEBUG nova.compute.manager [None req-85a434c8-5c4f-41b1-9cf5-16e84341eebe tempest-ServersTestManualDisk-608428885 tempest-ServersTestManualDisk-608428885-project-member] [instance: d87291e9-b698-4e6c-a265-55c00b863ac1] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.655396] env[62346]: DEBUG nova.compute.manager [None req-85a434c8-5c4f-41b1-9cf5-16e84341eebe tempest-ServersTestManualDisk-608428885 tempest-ServersTestManualDisk-608428885-project-member] [instance: d87291e9-b698-4e6c-a265-55c00b863ac1] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.681462] env[62346]: DEBUG oslo_concurrency.lockutils [None req-85a434c8-5c4f-41b1-9cf5-16e84341eebe tempest-ServersTestManualDisk-608428885 tempest-ServersTestManualDisk-608428885-project-member] Lock "d87291e9-b698-4e6c-a265-55c00b863ac1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.838s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.691788] env[62346]: DEBUG nova.compute.manager [None req-da74f336-8254-4aaa-a28a-803daf3e111d tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 682b49a4-4901-4c62-9e5a-cf03047e1cb8] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.717528] env[62346]: DEBUG nova.compute.manager [None req-da74f336-8254-4aaa-a28a-803daf3e111d tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 682b49a4-4901-4c62-9e5a-cf03047e1cb8] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.740456] env[62346]: DEBUG oslo_concurrency.lockutils [None req-da74f336-8254-4aaa-a28a-803daf3e111d tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "682b49a4-4901-4c62-9e5a-cf03047e1cb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.851s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.749482] env[62346]: DEBUG nova.compute.manager [None req-03b9db48-34b0-49fe-9cdf-c178e57c096a tempest-ServerTagsTestJSON-173515555 tempest-ServerTagsTestJSON-173515555-project-member] [instance: 865c8799-4a94-41af-921c-c4206331ba81] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.772481] env[62346]: DEBUG nova.compute.manager [None req-03b9db48-34b0-49fe-9cdf-c178e57c096a tempest-ServerTagsTestJSON-173515555 tempest-ServerTagsTestJSON-173515555-project-member] [instance: 865c8799-4a94-41af-921c-c4206331ba81] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.796887] env[62346]: DEBUG oslo_concurrency.lockutils [None req-03b9db48-34b0-49fe-9cdf-c178e57c096a tempest-ServerTagsTestJSON-173515555 tempest-ServerTagsTestJSON-173515555-project-member] Lock "865c8799-4a94-41af-921c-c4206331ba81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.163s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.809062] env[62346]: DEBUG nova.compute.manager [None req-50e9d95b-322e-4559-9767-a3101fa00193 tempest-ServersNegativeTestJSON-1450782741 tempest-ServersNegativeTestJSON-1450782741-project-member] [instance: f0d00a15-7869-4b1a-bf6f-a7f5e1c88138] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.832188] env[62346]: DEBUG nova.compute.manager [None req-50e9d95b-322e-4559-9767-a3101fa00193 tempest-ServersNegativeTestJSON-1450782741 tempest-ServersNegativeTestJSON-1450782741-project-member] [instance: f0d00a15-7869-4b1a-bf6f-a7f5e1c88138] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.853508] env[62346]: DEBUG oslo_concurrency.lockutils [None req-50e9d95b-322e-4559-9767-a3101fa00193 tempest-ServersNegativeTestJSON-1450782741 tempest-ServersNegativeTestJSON-1450782741-project-member] Lock "f0d00a15-7869-4b1a-bf6f-a7f5e1c88138" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.545s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.864287] env[62346]: DEBUG nova.compute.manager [None req-3ca091da-3cc6-4226-b25e-19f78f96bf8b tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] [instance: e2bfd80f-9e03-4dd1-bd28-fb33b54463b9] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.890474] env[62346]: DEBUG nova.compute.manager [None req-3ca091da-3cc6-4226-b25e-19f78f96bf8b tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] [instance: e2bfd80f-9e03-4dd1-bd28-fb33b54463b9] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.914641] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3ca091da-3cc6-4226-b25e-19f78f96bf8b tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] Lock "e2bfd80f-9e03-4dd1-bd28-fb33b54463b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.631s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.924746] env[62346]: DEBUG nova.compute.manager [None req-8c741b35-9ddc-4760-8ba8-14e510d6ce82 tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] [instance: c6a1f6a2-05be-43d5-a2c3-36bc5a878434] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 843.950499] env[62346]: DEBUG nova.compute.manager [None req-8c741b35-9ddc-4760-8ba8-14e510d6ce82 tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] [instance: c6a1f6a2-05be-43d5-a2c3-36bc5a878434] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 843.976631] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8c741b35-9ddc-4760-8ba8-14e510d6ce82 tempest-ListImageFiltersTestJSON-968801973 tempest-ListImageFiltersTestJSON-968801973-project-member] Lock "c6a1f6a2-05be-43d5-a2c3-36bc5a878434" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.152s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.986048] env[62346]: DEBUG nova.compute.manager [None req-a97b669d-c0f3-4914-8f89-a0b217393d0f tempest-InstanceActionsNegativeTestJSON-1402465515 tempest-InstanceActionsNegativeTestJSON-1402465515-project-member] [instance: 55ab9511-9e85-426e-b6bc-829f88c534f4] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.012479] env[62346]: DEBUG nova.compute.manager [None req-a97b669d-c0f3-4914-8f89-a0b217393d0f tempest-InstanceActionsNegativeTestJSON-1402465515 tempest-InstanceActionsNegativeTestJSON-1402465515-project-member] [instance: 55ab9511-9e85-426e-b6bc-829f88c534f4] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.036363] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a97b669d-c0f3-4914-8f89-a0b217393d0f tempest-InstanceActionsNegativeTestJSON-1402465515 tempest-InstanceActionsNegativeTestJSON-1402465515-project-member] Lock "55ab9511-9e85-426e-b6bc-829f88c534f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.024s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.046347] env[62346]: DEBUG nova.compute.manager [None req-9a916406-af80-4be1-bf48-ade97a24ecc5 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: ca26d6e1-b841-490c-bfd6-33351926e630] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.088239] env[62346]: DEBUG nova.compute.manager [None req-9a916406-af80-4be1-bf48-ade97a24ecc5 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: ca26d6e1-b841-490c-bfd6-33351926e630] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 844.112436] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9a916406-af80-4be1-bf48-ade97a24ecc5 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "ca26d6e1-b841-490c-bfd6-33351926e630" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.034s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.122824] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 844.185883] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.186172] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.187932] env[62346]: INFO nova.compute.claims [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.590683] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01094b97-2203-4176-97d4-a22afcdb8339 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.600192] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d601b581-4733-4206-889b-bf6c29457a79 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.632754] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e62bab-7f2c-48d4-9b4a-09d09ae3551a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.643250] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc3d030-b97b-4cd0-ba16-35138a6b5434 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.658370] env[62346]: DEBUG nova.compute.provider_tree [None 
req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.668201] env[62346]: DEBUG nova.scheduler.client.report [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.676525] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.676757] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.683947] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.498s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.684372] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 844.722712] env[62346]: DEBUG nova.compute.utils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 844.722893] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Allocating IP information in the background. 
{{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 844.723080] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 844.732612] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 844.807441] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 844.817780] env[62346]: DEBUG nova.policy [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94e7abb3c97341ea8b980089524ced45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd9cc5c4d97b46b290004d72385eea3a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 844.833954] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.834225] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.834385] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 844.834569] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.834721] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.834862] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 844.835086] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.835247] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 844.835412] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.835575] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.835746] env[62346]: DEBUG nova.virt.hardware [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.836628] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf199eed-08bf-40f7-aac6-d4ac7bc2ce61 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.846478] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e4f445-c495-4fec-bd5b-04051b57872c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.365320] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Successfully created port: 
f919d72c-8102-4d95-b674-6790a7bd3e04 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.232922] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Successfully updated port: f919d72c-8102-4d95-b674-6790a7bd3e04 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.249748] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "refresh_cache-c72a59f9-220d-4da4-8daa-2724ab255190" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.249901] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquired lock "refresh_cache-c72a59f9-220d-4da4-8daa-2724ab255190" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.250064] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 846.300964] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.330150] env[62346]: DEBUG nova.compute.manager [req-2ac5f1b8-1b40-4940-8e12-786493389c86 req-85d55c17-7869-442f-b200-286e6d3d4c01 service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Received event network-vif-plugged-f919d72c-8102-4d95-b674-6790a7bd3e04 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 846.330150] env[62346]: DEBUG oslo_concurrency.lockutils [req-2ac5f1b8-1b40-4940-8e12-786493389c86 req-85d55c17-7869-442f-b200-286e6d3d4c01 service nova] Acquiring lock "c72a59f9-220d-4da4-8daa-2724ab255190-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.330334] env[62346]: DEBUG oslo_concurrency.lockutils [req-2ac5f1b8-1b40-4940-8e12-786493389c86 req-85d55c17-7869-442f-b200-286e6d3d4c01 service nova] Lock "c72a59f9-220d-4da4-8daa-2724ab255190-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.331079] env[62346]: DEBUG oslo_concurrency.lockutils [req-2ac5f1b8-1b40-4940-8e12-786493389c86 req-85d55c17-7869-442f-b200-286e6d3d4c01 service nova] Lock "c72a59f9-220d-4da4-8daa-2724ab255190-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.331079] env[62346]: DEBUG nova.compute.manager [req-2ac5f1b8-1b40-4940-8e12-786493389c86 req-85d55c17-7869-442f-b200-286e6d3d4c01 service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] No waiting events found dispatching network-vif-plugged-f919d72c-8102-4d95-b674-6790a7bd3e04 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 846.331079] env[62346]: WARNING nova.compute.manager [req-2ac5f1b8-1b40-4940-8e12-786493389c86 req-85d55c17-7869-442f-b200-286e6d3d4c01 service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Received unexpected event network-vif-plugged-f919d72c-8102-4d95-b674-6790a7bd3e04 for instance with vm_state building and task_state spawning. 
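
Annotation: this exchange shows the external-event handshake racing ahead of the build thread: the network-vif-plugged event from Neutron arrives, the per-instance "-events" lock is taken to pop a waiter, none is registered yet, so the event is dispatched to nobody and logged as unexpected. A sketch of the pop-or-warn idea with hypothetical names; Nova's real implementation lives in nova.compute.manager.InstanceEvents:

    import threading

    _waiters = {}            # (instance_uuid, event_name) -> threading.Event
    _waiters_lock = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        # Called by the build path *before* triggering the action that
        # produces the event, so the waiter exists when the event lands.
        ev = threading.Event()
        with _waiters_lock:
            _waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(instance_uuid, event_name):
        # Called when an external event arrives from Neutron.
        with _waiters_lock:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('WARNING: received unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        else:
            ev.set()         # wake the thread blocked in the build path
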
[ 846.585492] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Updating instance_info_cache with network_info: [{"id": "f919d72c-8102-4d95-b674-6790a7bd3e04", "address": "fa:16:3e:c2:8a:c6", "network": {"id": "a297d27f-aa87-4639-8863-5585808c66b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-257794345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fd9cc5c4d97b46b290004d72385eea3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf919d72c-81", "ovs_interfaceid": "f919d72c-8102-4d95-b674-6790a7bd3e04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.601980] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Releasing lock "refresh_cache-c72a59f9-220d-4da4-8daa-2724ab255190" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.602337] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Instance network_info: |[{"id": "f919d72c-8102-4d95-b674-6790a7bd3e04", "address": "fa:16:3e:c2:8a:c6", "network": {"id": "a297d27f-aa87-4639-8863-5585808c66b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-257794345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fd9cc5c4d97b46b290004d72385eea3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf919d72c-81", "ovs_interfaceid": "f919d72c-8102-4d95-b674-6790a7bd3e04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 846.603661] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 
tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:8a:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b356db78-99c7-4464-822c-fc7e193f7878', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f919d72c-8102-4d95-b674-6790a7bd3e04', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.611871] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Creating folder: Project (fd9cc5c4d97b46b290004d72385eea3a). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 846.612664] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b98c451c-ca0a-40d6-a1af-8bf8245d3d3b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.626445] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Created folder: Project (fd9cc5c4d97b46b290004d72385eea3a) in parent group-v953204. [ 846.626736] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Creating folder: Instances. Parent ref: group-v953254. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 846.626913] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f969cb0-8202-4ea0-8190-31db7b7db959 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.638375] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Created folder: Instances in parent group-v953254. [ 846.638710] env[62346]: DEBUG oslo.service.loopingcall [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.639100] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 846.639192] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc59bea6-b9ff-48e7-8d7a-9791f59a440a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.661019] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.661019] env[62346]: value = "task-4891647" [ 846.661019] env[62346]: _type = "Task" [ 846.661019] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.670022] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891647, 'name': CreateVM_Task} progress is 0%. 
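
Annotation: CreateVM_Task is asynchronous on the vCenter side; the "Waiting for the task ... to complete" record and the "progress is 0%" polls come from a wait loop that repeatedly fetches task state. A generic sketch of such a loop; the helper name, interval, and info layout are assumptions rather than oslo.vmware's exact internals:

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # Poll a vCenter-style task object until it succeeds or errors out.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()   # one property-collector round trip
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)     # 'queued'/'running': try again shortly
        raise TimeoutError('task did not complete within %.0fs' % timeout)
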
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.170962] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891647, 'name': CreateVM_Task, 'duration_secs': 0.306193} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.171757] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 847.172362] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.172362] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.172571] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 847.172741] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e8d8781-2cb9-4424-8008-05030dc46feb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.177417] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Waiting for the task: (returnval){ [ 847.177417] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52068436-33b5-fabc-30c0-214cdd41791c" [ 847.177417] env[62346]: _type = "Task" [ 847.177417] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.185639] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52068436-33b5-fabc-30c0-214cdd41791c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.687932] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.688263] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.688432] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.358783] env[62346]: DEBUG nova.compute.manager [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Received event network-changed-f919d72c-8102-4d95-b674-6790a7bd3e04 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 848.359042] env[62346]: DEBUG nova.compute.manager [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Refreshing instance network info cache due to event network-changed-f919d72c-8102-4d95-b674-6790a7bd3e04. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 848.359906] env[62346]: DEBUG oslo_concurrency.lockutils [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] Acquiring lock "refresh_cache-c72a59f9-220d-4da4-8daa-2724ab255190" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.360204] env[62346]: DEBUG oslo_concurrency.lockutils [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] Acquired lock "refresh_cache-c72a59f9-220d-4da4-8daa-2724ab255190" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.360458] env[62346]: DEBUG nova.network.neutron [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Refreshing network info cache for port f919d72c-8102-4d95-b674-6790a7bd3e04 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 848.901301] env[62346]: DEBUG nova.network.neutron [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Updated VIF entry in instance network info cache for port f919d72c-8102-4d95-b674-6790a7bd3e04. 
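
Annotation: the "Processing image" step above leads into _fetch_image_if_missing, a classic double-checked download guarded by the per-file lock being acquired ("[datastore2] devstack-image-cache_base/....vmdk"): check the cache, and only if the image is absent take the lock, re-check, and download. A local-filesystem sketch of the shape, assuming oslo.concurrency; the real code copies VMDKs on the datastore, so every path here is illustrative:

    import os
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(cache_dir, image_id, download):
        cached = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        if os.path.exists(cached):           # fast path, no lock needed
            return cached
        with lockutils.lock(cached):         # one lock name per cached file
            if not os.path.exists(cached):   # re-check: another worker may have won
                os.makedirs(os.path.dirname(cached), exist_ok=True)
                tmp = download(image_id)     # e.g. Glance -> tmp-sparse.vmdk
                os.replace(tmp, cached)      # stand-in for CopyVirtualDisk_Task
        return cached
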
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 848.901623] env[62346]: DEBUG nova.network.neutron [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Updating instance_info_cache with network_info: [{"id": "f919d72c-8102-4d95-b674-6790a7bd3e04", "address": "fa:16:3e:c2:8a:c6", "network": {"id": "a297d27f-aa87-4639-8863-5585808c66b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-257794345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fd9cc5c4d97b46b290004d72385eea3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf919d72c-81", "ovs_interfaceid": "f919d72c-8102-4d95-b674-6790a7bd3e04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.913183] env[62346]: DEBUG oslo_concurrency.lockutils [req-5b786ec2-06c3-4b0b-93d6-6eac36886533 req-d87fcf30-4982-429b-9270-fd8db372b11b service nova] Releasing lock "refresh_cache-c72a59f9-220d-4da4-8daa-2724ab255190" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.221904] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.232037] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.232272] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.232454] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.232638] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 855.233794] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350eb89b-785f-45b9-b7fb-8702682c0075 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.243317] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a37a980-b263-463a-8f9a-439d010d682a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.257981] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e8cdef-fb15-40b2-93db-1d08cf139fd5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.265142] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525a20f2-84e4-4b7e-9f21-9a7feb30ff0f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.294452] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180571MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 855.294609] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.294804] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.369421] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.369579] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.369704] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
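
Annotation: the audit above rebuilds the host's usage picture from the instances it tracks, and the arithmetic is visible in the log: 10 instances at 128 MB each plus the 512 MB reservation gives the used_ram=1792MB reported later in the "Final resource view" record. A minimal sketch of that summation over hypothetical instance dicts:

    def audit_resources(instances, phys_ram_mb, reserved_mb, total_vcpus):
        used_ram = reserved_mb + sum(i['memory_mb'] for i in instances)
        used_vcpus = sum(i['vcpus'] for i in instances)
        stats = {'num_instances': str(len(instances))}
        for i in instances:                  # per-project counters, as in the log
            key = 'num_proj_%s' % i['project_id']
            stats[key] = str(int(stats.get(key, '0')) + 1)
        return {'used_ram_mb': used_ram,     # 10 * 128 + 512 = 1792
                'free_ram_mb': phys_ram_mb - used_ram,
                'used_vcpus': used_vcpus,
                'free_vcpus': total_vcpus - used_vcpus,
                'stats': stats}
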
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.369823] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.369939] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.370066] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.370184] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.370408] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.370544] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.370657] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 855.383895] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.395676] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.407199] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.417858] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9feb7773-2b61-464f-878b-0a8ee21a22c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.428777] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c3c4fb8b-3897-4c85-b40c-710dc4d1fb16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.441440] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 92241377-06e3-41e1-bae5-718f1ae5908b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.452832] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 69db1596-7767-40cc-9872-45574c4f681e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.463306] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0889843e-cf4a-4ab3-9702-a2599fac93ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
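
Annotation: these records separate the allocation cases: instances actively managed here keep their placement allocations, instances merely scheduled here (an allocation exists but nothing has started) are skipped by the heal, and anything else would be a stale allocation to reclaim. A compact sketch of that filter; the container names are hypothetical:

    def remove_deleted_instances_allocations(allocations, tracked, scheduled):
        for uuid in list(allocations):
            if uuid in tracked:       # actively managed on this compute host
                continue
            if uuid in scheduled:     # has yet to start: skip heal of allocation
                continue
            del allocations[uuid]     # instance is gone: reclaim its allocation
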
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.474154] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f96e666-ac1d-48a1-b663-86f1bb9b64d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.485597] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 823c0133-92a4-4f86-9df5-7fdf57ccc9f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.497503] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7f1f9951-6292-4b19-8567-16495b9f90cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.509254] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4b4ad569-a890-4ade-aa72-313f8d87d430 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.520560] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1ee08a27-8aa5-49b8-abf7-7d69dee25a4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.533619] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d8fa321b-9703-4b46-b2c3-5889b03a9116 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 855.546882] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 40b09c7e-1206-4b4c-a1a6-e556a5af2743 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 855.561023] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c3c27528-211f-4c7b-ad25-fb6f2d8c7faf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 855.572628] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b5224ca6-5c7c-4631-bcc1-199ceaf1dee2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 855.572885] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 855.573060] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '26', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_57259b412e4744f28b85130103f6018f': '1', 'io_workload': '10', 'num_proj_67d7b1e9bec14d2f8be2b7d5f9da1973': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1', 'num_proj_da4db2adf7dd413caf5d93fedf5af2dc': '1', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1', 'num_task_spawning': '1', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 855.933747] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3849be44-2894-48db-a620-46ca0cfac425 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.943342] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf9136c-7d41-4a9d-869f-549ca9f7eec0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.973233] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7425495a-1ffa-4192-839b-e7c0ebfc81ac {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.981244] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b0fd62-2820-439e-943a-46b657380e77 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.995410] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 856.004274] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 856.018230] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 856.018414] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.724s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 859.014032] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 859.039999] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 859.039999] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 860.219676] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 860.220085] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 860.220085] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 860.220185] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache
/opt/stack/nova/nova/compute/manager.py:10011}} [ 860.240520] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.240679] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.240753] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.240883] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.241016] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.241152] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.241273] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.241396] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.241514] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.241669] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 860.241818] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
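
Annotation: the info-cache healer rebuilds its candidate list each period, skips instances that are still Building (the build path owns their cache while spawning), and heals at most one instance per run; with every candidate skipped it ends with "Didn't find any instances". A sketch of that loop with hypothetical call shapes:

    def heal_instance_info_cache(instances, refresh):
        for inst in instances:
            if inst['vm_state'] == 'building':
                continue          # skipping network cache update: still building
            refresh(inst)         # heal exactly one instance per periodic run
            return
        print("Didn't find any instances for network info cache update.")
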
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 860.242308] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.242509] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.242643] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 863.220084] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.220443] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 890.882470] env[62346]: WARNING oslo_vmware.rw_handles [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 890.882470] env[62346]: ERROR oslo_vmware.rw_handles [ 890.883337] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 890.884701] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 890.884946] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Copying Virtual Disk [datastore2] vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/22cf2cf4-de38-4e13-9219-088b411cd445/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 890.885275] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f54b0942-2fa3-4fc5-b185-992d40ab7fab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.893710] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Waiting for the task: (returnval){ [ 890.893710] env[62346]: value = "task-4891648" [ 890.893710] env[62346]: _type = "Task" [ 890.893710] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.902907] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Task: {'id': task-4891648, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.405277] env[62346]: DEBUG oslo_vmware.exceptions [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Fault InvalidArgument not matched. 
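
Annotation: "Fault InvalidArgument not matched" is the fault-to-exception mapping falling through: the fault name reported by vCenter is looked up in a registry of specific exception classes and, when absent, a generic VimFaultException is raised carrying the fault list, which is exactly what the traceback that follows shows. A sketch that mimics (but does not reproduce) oslo.vmware's translation; the registry contents are hypothetical:

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FileNotFoundFault(VimFaultException):
        pass

    _FAULT_REGISTRY = {'FileNotFound': FileNotFoundFault}  # illustrative subset

    def translate_fault(fault_name, message):
        cls = _FAULT_REGISTRY.get(fault_name)
        if cls is None:            # e.g. 'InvalidArgument': not matched
            return VimFaultException([fault_name], message)
        return cls([fault_name], message)
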
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 891.405516] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.406775] env[62346]: ERROR nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 891.406775] env[62346]: Faults: ['InvalidArgument'] [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Traceback (most recent call last): [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] yield resources [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self.driver.spawn(context, instance, image_meta, [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self._fetch_image_if_missing(context, vi) [ 891.406775] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] image_cache(vi, tmp_image_ds_loc) [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] vm_util.copy_virtual_disk( [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] session._wait_for_task(vmdk_copy_task) [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] return self.wait_for_task(task_ref) [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] return evt.wait() [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] result = hub.switch() [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 891.407241] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] return self.greenlet.switch() [ 891.407630] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 891.407630] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self.f(*self.args, **self.kw) [ 891.407630] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 891.407630] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] raise exceptions.translate_fault(task_info.error) [ 891.407630] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 891.407630] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Faults: ['InvalidArgument'] [ 891.407630] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] [ 891.407630] env[62346]: INFO nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Terminating instance [ 891.408701] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.408901] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.409155] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e667c7da-da30-4f66-844a-e832ec7b9b5e {{(pid=62346) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.411419] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 891.411609] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 891.412341] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4649157c-2b53-4652-a81b-945b594e9ee6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.420067] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 891.420356] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5273e439-e7b3-4ce4-9773-253002dfa8f9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.422930] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.423124] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 891.424036] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9312a17d-d2cf-4b8d-9a9c-29c8e290712a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.429688] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Waiting for the task: (returnval){ [ 891.429688] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52c14f31-4fe3-31ac-2719-25547ff7bada" [ 891.429688] env[62346]: _type = "Task" [ 891.429688] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.439549] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52c14f31-4fe3-31ac-2719-25547ff7bada, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.501175] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 891.501432] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 891.501793] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Deleting the datastore file [datastore2] eac976d1-2988-4106-ac61-59b8c1d9c1a3 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.502106] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abbe23ca-5275-4bf6-9b85-f3ddc7a07b55 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.509343] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Waiting for the task: (returnval){ [ 891.509343] env[62346]: value = "task-4891650" [ 891.509343] env[62346]: _type = "Task" [ 891.509343] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.518597] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Task: {'id': task-4891650, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.941137] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 891.941453] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Creating directory with path [datastore2] vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.941639] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c9acbef-5355-4bfe-b9ef-fd736118e915 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.953614] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Created directory with path [datastore2] vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.953814] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Fetch image to [datastore2] vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 891.953985] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 891.954747] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cbcf6a-60d3-4ed7-8a9f-dd230276c338 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.962302] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bc9f48-2e1a-4af9-814d-4bd8ad5725c0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.971889] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b6449a-f8d9-41d5-9fa0-72cad4c226c4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.002841] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdef3b5-7a25-416d-ac02-ff2156719fd3 {{(pid=62346) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.008740] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-df753583-b872-49f0-9f60-1543c048e904 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.017964] env[62346]: DEBUG oslo_vmware.api [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Task: {'id': task-4891650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06689} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.018225] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.018443] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 892.018684] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.018789] env[62346]: INFO nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Took 0.61 seconds to destroy the instance on the hypervisor. 
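The repeated "Waiting for the task: (returnval){ value = task-... }" / "progress is 0%" / "completed successfully" entries above are the oslo.vmware task-polling pattern: the client submits a vSphere task (here DeleteDatastoreFile_Task), then repeatedly reads its TaskInfo until the state is success or error, and a failed task is surfaced as a VimFaultException — exactly how the earlier fileType/InvalidArgument fault reached nova.compute.manager. A minimal stand-alone sketch of that loop follows; the TaskInfo dataclass, the fetch_task_info callable, and the poll interval are illustrative stand-ins, not oslo.vmware's actual API.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        # Illustrative stand-in for a vSphere TaskInfo object.
        state: str                # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(fetch_task_info, interval=0.5):
        """Poll a task until it succeeds, raising on error.

        fetch_task_info is a hypothetical callable returning the current
        TaskInfo; the real driver reads it via the property collector.
        """
        while True:
            info = fetch_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Mirrors _poll_task raising translate_fault(task_info.error),
                # e.g. "A specified parameter was not correct: fileType".
                raise VimFaultException(info.error)
            # Analogous to: "Task: {'id': task-4891650, ...} progress is 0%."
            print(f"progress is {info.progress}%")
            time.sleep(interval)

    # Simulated usage: the third poll reports success.
    states = iter([TaskInfo('running', 0), TaskInfo('running', 40),
                   TaskInfo('success', 100)])
    wait_for_task(lambda: next(states), interval=0.0)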
[ 892.022942] env[62346]: DEBUG nova.compute.claims [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 892.023139] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.023369] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.035647] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 892.099804] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 892.162409] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 892.162684] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 892.504844] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa43e13-6fb3-451d-b3b3-7879f0223da7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.514123] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931965d6-9c46-43c9-9997-fc44dc60285d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.545917] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c65719-9662-4b7b-a14c-c0e023aae5a4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.555237] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc2c671-5507-4dd0-ad55-8b4e968f9de5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.571542] env[62346]: DEBUG nova.compute.provider_tree [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.582398] env[62346]: DEBUG nova.scheduler.client.report [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 892.600249] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.576s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.600473] env[62346]: ERROR nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 892.600473] env[62346]: Faults: ['InvalidArgument'] [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Traceback (most recent call last): [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self.driver.spawn(context, instance, image_meta, [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self._fetch_image_if_missing(context, vi) [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] image_cache(vi, tmp_image_ds_loc) [ 892.600473] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] vm_util.copy_virtual_disk( [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] session._wait_for_task(vmdk_copy_task) [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] return self.wait_for_task(task_ref) [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] return evt.wait() [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] result = hub.switch() [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] return self.greenlet.switch() [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 892.601478] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] self.f(*self.args, **self.kw) [ 892.602087] env[62346]: ERROR nova.compute.manager [instance: 
eac976d1-2988-4106-ac61-59b8c1d9c1a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 892.602087] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] raise exceptions.translate_fault(task_info.error) [ 892.602087] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 892.602087] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Faults: ['InvalidArgument'] [ 892.602087] env[62346]: ERROR nova.compute.manager [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] [ 892.602087] env[62346]: DEBUG nova.compute.utils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 892.602882] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Build of instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 was re-scheduled: A specified parameter was not correct: fileType [ 892.602882] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 892.603295] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 892.603470] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 892.603650] env[62346]: DEBUG nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 892.603836] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 893.037427] env[62346]: DEBUG nova.network.neutron [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.051337] env[62346]: INFO nova.compute.manager [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Took 0.45 seconds to deallocate network for instance. [ 893.203402] env[62346]: INFO nova.scheduler.client.report [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Deleted allocations for instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 [ 893.226511] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4e5d45df-c7a9-44c6-98ac-ba858bead97a tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 347.037s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.228038] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 149.231s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.228259] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Acquiring lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.228504] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock 
"eac976d1-2988-4106-ac61-59b8c1d9c1a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.228676] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.230813] env[62346]: INFO nova.compute.manager [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Terminating instance [ 893.232620] env[62346]: DEBUG nova.compute.manager [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 893.232801] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 893.233283] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7441a4d-f5fc-40c9-b17f-bf5b953d09ab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.238523] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 893.245485] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f578d7b-d7a2-4a1d-868c-34328010fdbe {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.279186] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eac976d1-2988-4106-ac61-59b8c1d9c1a3 could not be found. 
[ 893.279564] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 893.279687] env[62346]: INFO nova.compute.manager [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 893.279847] env[62346]: DEBUG oslo.service.loopingcall [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.284479] env[62346]: DEBUG nova.compute.manager [-] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 893.284663] env[62346]: DEBUG nova.network.neutron [-] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 893.301104] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.301363] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.302985] env[62346]: INFO nova.compute.claims [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 893.325727] env[62346]: DEBUG nova.network.neutron [-] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.339215] env[62346]: INFO nova.compute.manager [-] [instance: eac976d1-2988-4106-ac61-59b8c1d9c1a3] Took 0.05 seconds to deallocate network for instance. 
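Throughout this section the lockutils entries follow one pattern: 'Acquiring lock "compute_resources" by ...', then 'acquired ... waited N s', then '"released" ... held N s'. That is a named process-wide lock guarding the resource tracker, so the abort of the failed claim (req-4e5d45df...) and the new claim for af33f439... (req-88bf243b...) serialize on the same lock. A stdlib-only sketch that reproduces the waited/held accounting — a simplification of oslo_concurrency.lockutils.synchronized, with the log strings approximated, not quoted from the library:

    import threading
    import time

    _locks = {}
    _registry_guard = threading.Lock()

    def _get_lock(name):
        # One shared lock object per lock name, created on first use.
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())

    def synchronized(name):
        """Decorator sketch: serialize callers on the named lock and log
        how long each caller waited for and then held it."""
        def wrap(fn):
            def inner(*args, **kwargs):
                lock = _get_lock(name)
                t0 = time.monotonic()
                with lock:
                    waited = time.monotonic() - t0
                    print(f'Lock "{name}" acquired by "{fn.__name__}" '
                          f':: waited {waited:.3f}s')
                    t1 = time.monotonic()
                    try:
                        return fn(*args, **kwargs)
                    finally:
                        held = time.monotonic() - t1
                        print(f'Lock "{name}" "released" by "{fn.__name__}" '
                              f':: held {held:.3f}s')
            return inner
        return wrap

    @synchronized("compute_resources")
    def instance_claim():
        time.sleep(0.01)  # stand-in for claim bookkeeping

    instance_claim()  # prints waited/held lines analogous to the log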
[ 893.454806] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d213a3b9-ef1b-4e71-ae1f-85ab3d66c27e tempest-ServerDiagnosticsNegativeTest-2033188068 tempest-ServerDiagnosticsNegativeTest-2033188068-project-member] Lock "eac976d1-2988-4106-ac61-59b8c1d9c1a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.226s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.722766] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b94a58d-8f7a-41f8-871e-7337f90daf9f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.732366] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d8896c-4aba-4511-99bc-4cb294ba2a24 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.762612] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8dcc70-49bc-4439-8cf5-dc19c96ca404 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.770937] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd465c8-9106-4f7f-8559-a7d4997e1a1b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.785687] env[62346]: DEBUG nova.compute.provider_tree [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.795720] env[62346]: DEBUG nova.scheduler.client.report [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.813567] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.512s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.814244] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 893.853971] env[62346]: DEBUG nova.compute.utils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 893.855285] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 893.855628] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 893.865678] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 893.934154] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 893.938055] env[62346]: DEBUG nova.policy [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08f7f4a535db4432bedd493a0f363c51', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04179eaf0efd4e43a9d4eb1445ffc270', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 893.965688] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 893.966028] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 893.966110] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.966270] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 893.966422] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.966568] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 893.966777] env[62346]: DEBUG 
nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 893.966986] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 893.967102] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 893.967270] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 893.967438] env[62346]: DEBUG nova.virt.hardware [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 893.968505] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb174ce-b608-433c-ad2f-d18714b26145 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.977027] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ed2bc1-a5dc-4fc8-a71f-c5c00ede31e0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.478022] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Successfully created port: 5fea9ca0-3d82-41db-b345-b1d13bacf38b {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.439867] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Successfully updated port: 5fea9ca0-3d82-41db-b345-b1d13bacf38b {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 895.452882] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring lock "refresh_cache-af33f439-7ebe-478a-83ee-f7fc8e7b630d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.453346] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 
tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquired lock "refresh_cache-af33f439-7ebe-478a-83ee-f7fc8e7b630d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.453554] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 895.460467] env[62346]: DEBUG nova.compute.manager [req-c17fe463-057c-4457-9682-3eb4868489f9 req-a18bf2d5-c767-433f-a1aa-66193fecae08 service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Received event network-vif-plugged-5fea9ca0-3d82-41db-b345-b1d13bacf38b {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 895.460467] env[62346]: DEBUG oslo_concurrency.lockutils [req-c17fe463-057c-4457-9682-3eb4868489f9 req-a18bf2d5-c767-433f-a1aa-66193fecae08 service nova] Acquiring lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.460696] env[62346]: DEBUG oslo_concurrency.lockutils [req-c17fe463-057c-4457-9682-3eb4868489f9 req-a18bf2d5-c767-433f-a1aa-66193fecae08 service nova] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.460739] env[62346]: DEBUG oslo_concurrency.lockutils [req-c17fe463-057c-4457-9682-3eb4868489f9 req-a18bf2d5-c767-433f-a1aa-66193fecae08 service nova] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.460899] env[62346]: DEBUG nova.compute.manager [req-c17fe463-057c-4457-9682-3eb4868489f9 req-a18bf2d5-c767-433f-a1aa-66193fecae08 service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] No waiting events found dispatching network-vif-plugged-5fea9ca0-3d82-41db-b345-b1d13bacf38b {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 895.461082] env[62346]: WARNING nova.compute.manager [req-c17fe463-057c-4457-9682-3eb4868489f9 req-a18bf2d5-c767-433f-a1aa-66193fecae08 service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Received unexpected event network-vif-plugged-5fea9ca0-3d82-41db-b345-b1d13bacf38b for instance with vm_state building and task_state spawning. [ 895.512081] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.573616] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "c72a59f9-220d-4da4-8daa-2724ab255190" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.776557] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Updating instance_info_cache with network_info: [{"id": "5fea9ca0-3d82-41db-b345-b1d13bacf38b", "address": "fa:16:3e:e1:87:c1", "network": {"id": "677bcff7-a69f-48d2-9d70-4866d110719a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2103235217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04179eaf0efd4e43a9d4eb1445ffc270", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fea9ca0-3d", "ovs_interfaceid": "5fea9ca0-3d82-41db-b345-b1d13bacf38b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.793296] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Releasing lock "refresh_cache-af33f439-7ebe-478a-83ee-f7fc8e7b630d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.793612] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Instance network_info: |[{"id": "5fea9ca0-3d82-41db-b345-b1d13bacf38b", "address": "fa:16:3e:e1:87:c1", "network": {"id": "677bcff7-a69f-48d2-9d70-4866d110719a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2103235217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04179eaf0efd4e43a9d4eb1445ffc270", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fea9ca0-3d", "ovs_interfaceid": "5fea9ca0-3d82-41db-b345-b1d13bacf38b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 895.794048] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:87:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fea9ca0-3d82-41db-b345-b1d13bacf38b', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.803933] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Creating folder: Project (04179eaf0efd4e43a9d4eb1445ffc270). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 895.804564] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f09befc-7d9d-48bc-8014-4ee72deeafe4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.816303] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Created folder: Project (04179eaf0efd4e43a9d4eb1445ffc270) in parent group-v953204. [ 895.816506] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Creating folder: Instances. Parent ref: group-v953257. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 895.816747] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43da1c28-1dcd-4daf-804b-7acde4bc6fd9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.826432] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Created folder: Instances in parent group-v953257. [ 895.826828] env[62346]: DEBUG oslo.service.loopingcall [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.826933] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 895.827207] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab530f17-5a06-4ff3-84a2-31e8f55a2633 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.847457] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.847457] env[62346]: value = "task-4891653" [ 895.847457] env[62346]: _type = "Task" [ 895.847457] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.856328] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891653, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.357625] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891653, 'name': CreateVM_Task, 'duration_secs': 0.324575} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.357806] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 896.358527] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.358710] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.359040] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 896.359294] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-604f4b45-b6ef-4796-808e-c5c301d8a6fb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.364479] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Waiting for the task: (returnval){ [ 896.364479] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ba6b51-2f2e-43b7-4096-ff3e4221d7c1" [ 896.364479] env[62346]: _type = "Task" [ 896.364479] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.372856] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ba6b51-2f2e-43b7-4096-ff3e4221d7c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.875877] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.875877] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.876355] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.581571] env[62346]: DEBUG nova.compute.manager [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Received event network-changed-5fea9ca0-3d82-41db-b345-b1d13bacf38b {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 897.583403] env[62346]: DEBUG nova.compute.manager [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Refreshing instance network info cache due to event network-changed-5fea9ca0-3d82-41db-b345-b1d13bacf38b. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 897.583403] env[62346]: DEBUG oslo_concurrency.lockutils [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] Acquiring lock "refresh_cache-af33f439-7ebe-478a-83ee-f7fc8e7b630d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.583403] env[62346]: DEBUG oslo_concurrency.lockutils [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] Acquired lock "refresh_cache-af33f439-7ebe-478a-83ee-f7fc8e7b630d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.583403] env[62346]: DEBUG nova.network.neutron [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Refreshing network info cache for port 5fea9ca0-3d82-41db-b345-b1d13bacf38b {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 897.996904] env[62346]: DEBUG nova.network.neutron [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Updated VIF entry in instance network info cache for port 5fea9ca0-3d82-41db-b345-b1d13bacf38b. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 897.996904] env[62346]: DEBUG nova.network.neutron [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Updating instance_info_cache with network_info: [{"id": "5fea9ca0-3d82-41db-b345-b1d13bacf38b", "address": "fa:16:3e:e1:87:c1", "network": {"id": "677bcff7-a69f-48d2-9d70-4866d110719a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2103235217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "04179eaf0efd4e43a9d4eb1445ffc270", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fea9ca0-3d", "ovs_interfaceid": "5fea9ca0-3d82-41db-b345-b1d13bacf38b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.008447] env[62346]: DEBUG oslo_concurrency.lockutils [req-4052c172-dac2-4943-aa32-4532658e3708 req-0488f818-660c-45da-9cb9-e13e6f1c726d service nova] Releasing lock "refresh_cache-af33f439-7ebe-478a-83ee-f7fc8e7b630d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.723565] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring 
lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.723932] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.221247] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.234118] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.234439] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.234666] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.234874] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 917.236517] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f97596-1540-487a-9381-ed5ac3cb5067 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.247465] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a383602d-399c-4fd7-a46c-5812eced6700 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.265437] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15239a49-d72c-4f2b-b978-a31a2f3586e8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.273118] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8b4187-e63d-43d9-bc9e-9e3263ca2551 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.305628] env[62346]: DEBUG nova.compute.resource_tracker [None 
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180593MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 917.305628] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.305628] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.407181] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.407351] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.407479] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.407601] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.407720] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.408302] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.408302] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.408302] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.408302] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.408598] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 917.420282] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.432243] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.444169] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9feb7773-2b61-464f-878b-0a8ee21a22c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.455556] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c3c4fb8b-3897-4c85-b40c-710dc4d1fb16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.466569] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 92241377-06e3-41e1-bae5-718f1ae5908b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.480820] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 69db1596-7767-40cc-9872-45574c4f681e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.492207] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0889843e-cf4a-4ab3-9702-a2599fac93ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.503933] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f96e666-ac1d-48a1-b663-86f1bb9b64d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.514311] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 823c0133-92a4-4f86-9df5-7fdf57ccc9f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.527483] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7f1f9951-6292-4b19-8567-16495b9f90cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.537792] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4b4ad569-a890-4ade-aa72-313f8d87d430 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.548194] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1ee08a27-8aa5-49b8-abf7-7d69dee25a4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.558770] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d8fa321b-9703-4b46-b2c3-5889b03a9116 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.570505] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 40b09c7e-1206-4b4c-a1a6-e556a5af2743 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.581704] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c3c27528-211f-4c7b-ad25-fb6f2d8c7faf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.591621] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b5224ca6-5c7c-4631-bcc1-199ceaf1dee2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.602274] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 917.602517] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 917.602676] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '27', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_57259b412e4744f28b85130103f6018f': '1', 'io_workload': '10', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1', 'num_proj_da4db2adf7dd413caf5d93fedf5af2dc': '1', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1', 'num_task_spawning': '1', 'num_proj_04179eaf0efd4e43a9d4eb1445ffc270': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 917.927173] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35273729-9a65-4316-9600-2c61ee9d7375 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.936086] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e61b547-ec16-4b32-b375-b26f1f62ca87 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.965688] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9702d26e-38fe-40f9-944c-e83489908537 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.974147] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c5b75b-de45-4337-9f55-3dab715b294f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.988013] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.996999] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 918.011348] env[62346]: DEBUG 
nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 918.011652] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.706s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.011567] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.216048] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.220066] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.220066] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 920.220066] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 920.243473] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.243644] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.243776] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.243905] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.244044] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.244154] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.244269] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.244387] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.244503] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.244620] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 920.244740] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 920.245309] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.220146] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.219488] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.219714] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 923.221772] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.220237] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.630035] env[62346]: WARNING oslo_vmware.rw_handles [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 940.630035] env[62346]: ERROR oslo_vmware.rw_handles [ 940.630688] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 940.632714] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 940.632965] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Copying Virtual Disk [datastore2] vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/96dec7bb-74fe-4907-9a1c-2b3e296c554f/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) 
copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 940.633334] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f02464b6-69b2-4592-a909-377d9ea502a0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.643515] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Waiting for the task: (returnval){ [ 940.643515] env[62346]: value = "task-4891654" [ 940.643515] env[62346]: _type = "Task" [ 940.643515] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.653231] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Task: {'id': task-4891654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.156599] env[62346]: DEBUG oslo_vmware.exceptions [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 941.156886] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.157444] env[62346]: ERROR nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.157444] env[62346]: Faults: ['InvalidArgument'] [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Traceback (most recent call last): [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] yield resources [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self.driver.spawn(context, instance, image_meta, [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 
6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self._fetch_image_if_missing(context, vi) [ 941.157444] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] image_cache(vi, tmp_image_ds_loc) [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] vm_util.copy_virtual_disk( [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] session._wait_for_task(vmdk_copy_task) [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] return self.wait_for_task(task_ref) [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] return evt.wait() [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] result = hub.switch() [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 941.157769] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] return self.greenlet.switch() [ 941.158141] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 941.158141] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self.f(*self.args, **self.kw) [ 941.158141] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 941.158141] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] raise exceptions.translate_fault(task_info.error) [ 941.158141] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.158141] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Faults: ['InvalidArgument'] [ 
941.158141] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] [ 941.158141] env[62346]: INFO nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Terminating instance [ 941.159429] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.159656] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.159866] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07e46df7-d2ec-461d-bf09-5a817e1f469a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.162224] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 941.162422] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.163155] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee779cb-4d92-45ba-9c81-32de61063e21 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.171215] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 941.171466] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-acae722b-f709-4881-8301-f24d25e81620 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.174123] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.174123] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] 
Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 941.175142] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24a2e2fa-9a88-4193-89a3-1238edd4f624 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.180675] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for the task: (returnval){ [ 941.180675] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52a743a2-18dd-eb6c-fdef-2ed1eacccf68" [ 941.180675] env[62346]: _type = "Task" [ 941.180675] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.194729] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52a743a2-18dd-eb6c-fdef-2ed1eacccf68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.245983] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.245983] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.245983] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Deleting the datastore file [datastore2] 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.246289] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-650ec7da-96e3-4e64-a139-5f78b1f45458 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.254954] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Waiting for the task: (returnval){ [ 941.254954] env[62346]: value = "task-4891656" [ 941.254954] env[62346]: _type = "Task" [ 941.254954] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.264031] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Task: {'id': task-4891656, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.694023] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 941.694023] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Creating directory with path [datastore2] vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.694023] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f35e19e-c01b-4171-8106-06cc63eef977 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.711059] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Created directory with path [datastore2] vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.711059] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Fetch image to [datastore2] vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 941.711059] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 941.711059] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67c403d-4685-4a7b-96f2-c77b07faf77d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.719219] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf3f10f-b726-4749-b7ed-a8d2b444a7dc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.732025] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84255a27-7cc9-4046-a22c-fdb3426aba5c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.776739] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e80387e-f1dc-4ac5-8404-4d215be1a988 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.786448] env[62346]: DEBUG oslo_vmware.api [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Task: {'id': task-4891656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085115} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.788145] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.788440] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 941.788701] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.789029] env[62346]: INFO nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Took 0.63 seconds to destroy the instance on the hypervisor. 
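The DeleteDatastoreFile_Task above goes from "progress is 0%" to "completed successfully ... duration_secs: 0.085115" through the same polling loop that every task handle in this log passes through (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of that polling pattern — the fetch_task_info callable and the simplified state strings are hypothetical stand-ins, not the real vSphere TaskInfo object or the oslo.vmware API:

    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        # Simplified stand-in for the vim TaskInfo that a property read returns.
        state: str                  # "queued" | "running" | "success" | "error"
        progress: int = 0
        error: Optional[str] = None

    def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                      interval: float = 0.5, timeout: float = 300.0) -> TaskInfo:
        """Poll until the task reaches a terminal state, raising on error,
        like the repeated _poll_task lines (api.py:434/444) in the log."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            print(f"progress is {info.progress}%")   # the recurring 0% lines
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    # Usage with a canned sequence of states:
    states = iter([TaskInfo("running", 0), TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), interval=0.01)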
[ 941.792160] env[62346]: DEBUG nova.compute.claims [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 941.792388] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.792634] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.795169] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-64ec3acd-bc36-47ea-8446-4b64b46c3a10 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.823359] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 941.907888] env[62346]: DEBUG oslo_vmware.rw_handles [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 941.975325] env[62346]: DEBUG oslo_vmware.rw_handles [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 941.975534] env[62346]: DEBUG oslo_vmware.rw_handles [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 942.348278] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e83a86-dfea-4fbd-b86a-83fd2d0be2a7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.356708] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766d648a-8b4b-4eba-aa7d-6ec82a0960df {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.387533] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6f8085-4247-4008-89b4-cc4685973f94 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.395944] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbe23fd-6ce6-4cf2-b4ae-86e86b8dfe40 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.410380] env[62346]: DEBUG nova.compute.provider_tree [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.420587] env[62346]: DEBUG nova.scheduler.client.report [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.443354] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.650s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.443813] env[62346]: ERROR nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.443813] env[62346]: Faults: ['InvalidArgument'] [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Traceback (most recent call last): [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 942.443813] env[62346]: ERROR nova.compute.manager 
[instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self.driver.spawn(context, instance, image_meta, [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self._fetch_image_if_missing(context, vi) [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] image_cache(vi, tmp_image_ds_loc) [ 942.443813] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] vm_util.copy_virtual_disk( [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] session._wait_for_task(vmdk_copy_task) [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] return self.wait_for_task(task_ref) [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] return evt.wait() [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] result = hub.switch() [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] return self.greenlet.switch() [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 942.444163] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] self.f(*self.args, **self.kw) [ 942.444497] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 942.444497] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] raise exceptions.translate_fault(task_info.error) [ 942.444497] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.444497] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Faults: ['InvalidArgument'] [ 942.444497] env[62346]: ERROR nova.compute.manager [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] [ 942.444712] env[62346]: DEBUG nova.compute.utils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 942.446646] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Build of instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 was re-scheduled: A specified parameter was not correct: fileType [ 942.446646] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 942.447040] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 942.447224] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 942.447435] env[62346]: DEBUG nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 942.447611] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.872022] env[62346]: DEBUG nova.network.neutron [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.887749] env[62346]: INFO nova.compute.manager [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Took 0.44 seconds to deallocate network for instance. [ 943.023892] env[62346]: INFO nova.scheduler.client.report [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Deleted allocations for instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 [ 943.052253] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0a8095b-1cc9-4580-a95d-03199343c3de tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 397.049s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.053333] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 199.400s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.053396] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Acquiring lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.053566] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.053699] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.056168] env[62346]: INFO nova.compute.manager [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Terminating instance [ 943.058102] env[62346]: DEBUG nova.compute.manager [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 943.058303] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 943.058782] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e643edd4-64ab-4d4f-9bdb-53b443620fa3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.071811] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1043827c-5f01-4af3-ae37-ccd7f1f60572 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.083524] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 943.114063] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 could not be found. [ 943.114063] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 943.114063] env[62346]: INFO nova.compute.manager [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Took 0.05 seconds to destroy the instance on the hypervisor. 
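The terminate_instance path above finds no backend VM (it was already unregistered during the failed build), logs InstanceNotFound as a WARNING, and still reports "Instance destroyed" followed by a successful hypervisor destroy. A minimal sketch of that idempotent-teardown pattern, with a hypothetical backend object and a local InstanceNotFound standing in for nova.exception.InstanceNotFound:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(backend, instance_uuid, log=print):
        """Teardown that treats a missing backend VM as already destroyed,
        mirroring the WARNING + 'Instance destroyed' pair in the log."""
        try:
            vm_ref = backend.find_vm(instance_uuid)   # SearchIndex.FindAllByUuid above
            backend.unregister(vm_ref)
            backend.delete_files(vm_ref)
        except InstanceNotFound as exc:
            # Deleting something that is already gone counts as success here.
            log(f"Instance does not exist on backend: {exc}")
        log("Instance destroyed")

    class GoneBackend:
        # Demo backend whose VM lookup always fails, as in the log above.
        def find_vm(self, uuid):
            raise InstanceNotFound(f"Instance {uuid} could not be found.")

    destroy_instance(GoneBackend(), "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2")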
[ 943.114063] env[62346]: DEBUG oslo.service.loopingcall [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.114063] env[62346]: DEBUG nova.compute.manager [-] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 943.114409] env[62346]: DEBUG nova.network.neutron [-] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 943.140885] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.141273] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.142634] env[62346]: INFO nova.compute.claims [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.147025] env[62346]: DEBUG nova.network.neutron [-] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.160803] env[62346]: INFO nova.compute.manager [-] [instance: 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2] Took 0.05 seconds to deallocate network for instance. 
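Both the successful claim for 6155a6c2-3d55-4fe6-bade-a97db98796a0 ("Claim successful on node domain-c8...") and the earlier abort for 6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2 serialize on the single "compute_resources" lock. A minimal sketch of that claim/abort pattern under one shared lock; the Claim class, tracker dict, and function names here are illustrative, not the ResourceTracker API:

    import threading

    _compute_resources = threading.Lock()   # role of the "compute_resources" lock

    class Claim:
        def __init__(self, tracker, vcpus, memory_mb):
            self.tracker, self.vcpus, self.memory_mb = tracker, vcpus, memory_mb

        def abort(self):
            # Like abort_instance_claim: re-acquire the lock, return resources.
            with _compute_resources:
                self.tracker["vcpus"] += self.vcpus
                self.tracker["memory_mb"] += self.memory_mb

    def instance_claim(tracker, vcpus, memory_mb):
        """Reserve resources for a build under the shared lock."""
        with _compute_resources:
            if tracker["vcpus"] < vcpus or tracker["memory_mb"] < memory_mb:
                raise RuntimeError("insufficient resources")
            tracker["vcpus"] -= vcpus
            tracker["memory_mb"] -= memory_mb
            return Claim(tracker, vcpus, memory_mb)

    # Usage: claim for a build; abort if the spawn later fails.
    tracker = {"vcpus": 48, "memory_mb": 196590 - 512}
    claim = instance_claim(tracker, vcpus=1, memory_mb=128)   # m1.nano-sized
    claim.abort()   # the 'Aborting claim' branch after a failed spawn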
[ 943.298772] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5180e338-a704-4e9c-8e09-cb49b5649e3c tempest-ServerDiagnosticsTest-321192867 tempest-ServerDiagnosticsTest-321192867-project-member] Lock "6f2656bf-8ae5-4741-bdbb-7b42fafd4fd2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.245s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.679287] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4b8d9e-0970-4405-b0eb-eb3133f3fa34 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.688683] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef093a-c195-45c2-ad3b-2955977859ab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.728826] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cc9606-2b8e-4738-8c43-da430a0809fe {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.737532] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6fe801-5203-4f20-9fb8-49589cbcc160 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.757641] env[62346]: DEBUG nova.compute.provider_tree [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.770153] env[62346]: DEBUG nova.scheduler.client.report [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.795072] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.654s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.795850] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 943.842448] env[62346]: DEBUG nova.compute.utils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.843787] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 943.847023] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 943.854116] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 943.933662] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 943.961560] env[62346]: DEBUG nova.policy [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '016eb5d9a0974aaa959da05fe3d385ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a625cf2514b140fcb029253fd85f6ee8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 943.966513] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 943.966513] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.966513] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.966652] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.966652] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.966652] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.966652] env[62346]: DEBUG nova.virt.hardware 
[None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.966652] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.966782] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.966782] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.966782] env[62346]: DEBUG nova.virt.hardware [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.968129] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e50cf6-0960-429d-8d02-122423bb1cfc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.981661] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f87796-b5c0-4864-aed3-3f948f5a3cee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.503622] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Successfully created port: 5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.227600] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.228208] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.468763] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Successfully updated port: 5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.487972] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "refresh_cache-6155a6c2-3d55-4fe6-bade-a97db98796a0" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.488221] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquired lock "refresh_cache-6155a6c2-3d55-4fe6-bade-a97db98796a0" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.488280] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.747087] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.846162] env[62346]: DEBUG nova.compute.manager [req-c2400533-51ab-4ef1-938f-2995b4640ca0 req-1f71f7c2-67ce-40df-b659-25b87733bb46 service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Received event network-vif-plugged-5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 945.846273] env[62346]: DEBUG oslo_concurrency.lockutils [req-c2400533-51ab-4ef1-938f-2995b4640ca0 req-1f71f7c2-67ce-40df-b659-25b87733bb46 service nova] Acquiring lock "6155a6c2-3d55-4fe6-bade-a97db98796a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.847991] env[62346]: DEBUG oslo_concurrency.lockutils [req-c2400533-51ab-4ef1-938f-2995b4640ca0 req-1f71f7c2-67ce-40df-b659-25b87733bb46 service nova] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.847991] env[62346]: DEBUG oslo_concurrency.lockutils [req-c2400533-51ab-4ef1-938f-2995b4640ca0 req-1f71f7c2-67ce-40df-b659-25b87733bb46 service nova] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.847991] env[62346]: DEBUG nova.compute.manager [req-c2400533-51ab-4ef1-938f-2995b4640ca0 req-1f71f7c2-67ce-40df-b659-25b87733bb46 service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] No waiting events found dispatching network-vif-plugged-5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 945.847991] env[62346]: WARNING nova.compute.manager [req-c2400533-51ab-4ef1-938f-2995b4640ca0 req-1f71f7c2-67ce-40df-b659-25b87733bb46 service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Received unexpected event network-vif-plugged-5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5 for instance with vm_state building and task_state spawning. 
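The network-vif-plugged event above arrives from Neutron before the spawning thread has registered a waiter, so the handler pops nothing under the "<uuid>-events" lock, logs "No waiting events found", and warns about the unexpected event. A minimal sketch of that pop-or-warn dispatch, using simplified names rather than the nova.compute.manager API:

    import threading

    class InstanceEvents:
        """Registry pairing waiters with externally delivered events."""
        def __init__(self):
            self._lock = threading.Lock()   # role of the "<uuid>-events" lock
            self._waiters = {}              # (uuid, event_name) -> threading.Event

        def prepare(self, uuid, name):
            # A spawning thread would call this *before* plugging the VIF.
            with self._lock:
                evt = self._waiters[(uuid, name)] = threading.Event()
            return evt

        def pop(self, uuid, name):
            with self._lock:
                return self._waiters.pop((uuid, name), None)

    def external_instance_event(events, uuid, name, log=print):
        evt = events.pop(uuid, name)
        if evt is None:
            # The WARNING path in the log: event arrived, nobody was waiting yet.
            log(f"Received unexpected event {name} for instance {uuid}")
        else:
            evt.set()   # wake the thread blocked on evt.wait()

    events = InstanceEvents()
    external_instance_event(events, "6155a6c2-3d55-4fe6-bade-a97db98796a0",
                            "network-vif-plugged-5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5")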
[ 945.997248] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Updating instance_info_cache with network_info: [{"id": "5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5", "address": "fa:16:3e:b7:83:95", "network": {"id": "61ee5618-2161-4854-8112-3f84befc0256", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1489672787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a625cf2514b140fcb029253fd85f6ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5389bd09-22", "ovs_interfaceid": "5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.016020] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Releasing lock "refresh_cache-6155a6c2-3d55-4fe6-bade-a97db98796a0" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.016351] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Instance network_info: |[{"id": "5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5", "address": "fa:16:3e:b7:83:95", "network": {"id": "61ee5618-2161-4854-8112-3f84befc0256", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1489672787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a625cf2514b140fcb029253fd85f6ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5389bd09-22", "ovs_interfaceid": "5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.016758] env[62346]: DEBUG nova.virt.vmwareapi.vmops 
[None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:83:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.025486] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Creating folder: Project (a625cf2514b140fcb029253fd85f6ee8). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 946.026133] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b8b8ab8-b924-4874-9811-5943691e9615 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.037726] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Created folder: Project (a625cf2514b140fcb029253fd85f6ee8) in parent group-v953204. [ 946.038191] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Creating folder: Instances. Parent ref: group-v953260. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 946.038697] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6802236e-3820-4048-8384-865b0e489c10 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.049641] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Created folder: Instances in parent group-v953260. [ 946.050061] env[62346]: DEBUG oslo.service.loopingcall [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.050371] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 946.050637] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce116936-ec85-41e3-a020-816e40e2f271 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.074038] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.074038] env[62346]: value = "task-4891659" [ 946.074038] env[62346]: _type = "Task" [ 946.074038] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.082632] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891659, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.584527] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891659, 'name': CreateVM_Task, 'duration_secs': 0.326822} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.584868] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 946.585398] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.585560] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.585864] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 946.586130] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2f72641-e9b7-4a14-a39e-959d1c277371 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.591530] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Waiting for the task: (returnval){ [ 946.591530] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f73ee5-f66a-8942-b83f-a3c57b291d04" [ 946.591530] env[62346]: _type = "Task" [ 946.591530] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.599970] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f73ee5-f66a-8942-b83f-a3c57b291d04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.014041] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "045a7d28-8706-4818-be5f-20c03831686e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.014207] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "045a7d28-8706-4818-be5f-20c03831686e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.103428] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.103688] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.103901] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.875268] env[62346]: DEBUG nova.compute.manager [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Received event network-changed-5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 947.875542] env[62346]: DEBUG nova.compute.manager [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Refreshing instance network info cache due to event network-changed-5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 947.875579] env[62346]: DEBUG oslo_concurrency.lockutils [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] Acquiring lock "refresh_cache-6155a6c2-3d55-4fe6-bade-a97db98796a0" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.875718] env[62346]: DEBUG oslo_concurrency.lockutils [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] Acquired lock "refresh_cache-6155a6c2-3d55-4fe6-bade-a97db98796a0" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.875876] env[62346]: DEBUG nova.network.neutron [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Refreshing network info cache for port 5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 948.266409] env[62346]: DEBUG nova.network.neutron [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Updated VIF entry in instance network info cache for port 5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 948.266768] env[62346]: DEBUG nova.network.neutron [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Updating instance_info_cache with network_info: [{"id": "5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5", "address": "fa:16:3e:b7:83:95", "network": {"id": "61ee5618-2161-4854-8112-3f84befc0256", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1489672787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a625cf2514b140fcb029253fd85f6ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5389bd09-22", "ovs_interfaceid": "5389bd09-22d0-4cfd-81d7-2b4e5c71f6d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.281154] env[62346]: DEBUG oslo_concurrency.lockutils [req-fc968865-02bb-49aa-a536-ef982037fdb5 req-2878dbce-3171-4b2e-a7a1-333a395bce9f service nova] Releasing lock "refresh_cache-6155a6c2-3d55-4fe6-bade-a97db98796a0" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.825739] env[62346]: DEBUG oslo_concurrency.lockutils [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring 
lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.756814] env[62346]: DEBUG oslo_concurrency.lockutils [None req-57f9c5d6-72be-40d4-9fb5-eb719be663ae tempest-ServerExternalEventsTest-714518617 tempest-ServerExternalEventsTest-714518617-project-member] Acquiring lock "723d75ac-364c-4e21-a664-879e99839c16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.757881] env[62346]: DEBUG oslo_concurrency.lockutils [None req-57f9c5d6-72be-40d4-9fb5-eb719be663ae tempest-ServerExternalEventsTest-714518617 tempest-ServerExternalEventsTest-714518617-project-member] Lock "723d75ac-364c-4e21-a664-879e99839c16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.885574] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dd52e83f-c72b-4f69-811c-87734aa0aabd tempest-ServersListShow296Test-1218146697 tempest-ServersListShow296Test-1218146697-project-member] Acquiring lock "67812f8f-9aee-42ac-b22e-5761240cbb7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.885574] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dd52e83f-c72b-4f69-811c-87734aa0aabd tempest-ServersListShow296Test-1218146697 tempest-ServersListShow296Test-1218146697-project-member] Lock "67812f8f-9aee-42ac-b22e-5761240cbb7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.789993] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0be7558-3f95-4314-91f6-692effb8a4f6 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "29516cd3-7d37-40d0-9f8a-9215ae69938b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.789993] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0be7558-3f95-4314-91f6-692effb8a4f6 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "29516cd3-7d37-40d0-9f8a-9215ae69938b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.220596] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 977.241900] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.241900] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.241900] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.241900] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 977.243030] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0200ffd-23af-4fba-9f75-8812679f28a2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.260389] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2e7e27-8310-46ce-b4a3-f04880766e29 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.271731] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634ddb26-c1f1-46a4-94cb-07fd048f6831 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.279804] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f237881e-749c-44ed-926d-51a8e283d59d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.318306] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180585MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 977.318650] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.319018] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.421645] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1157187b-7051-4921-bd95-9ef3e2d17104 actively managed on this compute host 
and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421645] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421645] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421645] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421811] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421811] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421811] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421811] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421926] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.421926] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 977.440142] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.453116] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9feb7773-2b61-464f-878b-0a8ee21a22c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.464461] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c3c4fb8b-3897-4c85-b40c-710dc4d1fb16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.482876] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 92241377-06e3-41e1-bae5-718f1ae5908b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.497500] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 69db1596-7767-40cc-9872-45574c4f681e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.511841] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0889843e-cf4a-4ab3-9702-a2599fac93ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.524470] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6f96e666-ac1d-48a1-b663-86f1bb9b64d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.543733] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 823c0133-92a4-4f86-9df5-7fdf57ccc9f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.557857] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7f1f9951-6292-4b19-8567-16495b9f90cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.570031] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4b4ad569-a890-4ade-aa72-313f8d87d430 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.586242] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1ee08a27-8aa5-49b8-abf7-7d69dee25a4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.601052] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance d8fa321b-9703-4b46-b2c3-5889b03a9116 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.623028] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 40b09c7e-1206-4b4c-a1a6-e556a5af2743 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.636563] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c3c27528-211f-4c7b-ad25-fb6f2d8c7faf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.655213] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b5224ca6-5c7c-4631-bcc1-199ceaf1dee2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.667048] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.682735] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.698475] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.713094] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 723d75ac-364c-4e21-a664-879e99839c16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.726009] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 67812f8f-9aee-42ac-b22e-5761240cbb7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.738977] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29516cd3-7d37-40d0-9f8a-9215ae69938b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 977.738977] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 977.738977] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '28', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'io_workload': '10', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1', 'num_proj_da4db2adf7dd413caf5d93fedf5af2dc': '1', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1', 'num_proj_04179eaf0efd4e43a9d4eb1445ffc270': '1', 'num_task_spawning': '1', 'num_proj_a625cf2514b140fcb029253fd85f6ee8': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 978.289221] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b214387-bd88-40f4-a727-adb423c08aec {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.297218] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948f8ba2-6fdf-4152-be57-d5e3293c0a41 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.337087] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f808f90-97a5-48b5-9aa8-5f485de8ad44 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.346456] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80a01cc-087d-4624-941f-3519325d50ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.367532] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.379132] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.401356] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 978.401587] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.083s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.492681] env[62346]: DEBUG oslo_concurrency.lockutils [None req-08082b56-418b-48fa-9db9-e8b4e718ff54 tempest-VolumesAdminNegativeTest-1461191165 tempest-VolumesAdminNegativeTest-1461191165-project-member] Acquiring lock "bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.493065] env[62346]: DEBUG oslo_concurrency.lockutils [None req-08082b56-418b-48fa-9db9-e8b4e718ff54 tempest-VolumesAdminNegativeTest-1461191165 tempest-VolumesAdminNegativeTest-1461191165-project-member] Lock "bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.402656] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.402983] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 981.402983] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 981.426634] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.426911] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.426968] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427074] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427200] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427323] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427490] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427573] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427680] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427798] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 981.427919] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 981.428515] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.428694] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.241538] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.217855] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.243886] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.244178] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.244272] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.244401] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 985.220754] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.344855] env[62346]: WARNING oslo_vmware.rw_handles [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 987.344855] env[62346]: ERROR oslo_vmware.rw_handles [ 987.345505] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 987.347183] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 987.347431] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Copying Virtual Disk [datastore2] vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/67dd4e42-552f-4cd4-85e1-771ce324417f/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 987.347703] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ff2164e-65dd-40c2-acc5-0c837dcfa616 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.355644] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for the task: (returnval){ [ 987.355644] env[62346]: value = "task-4891660" [ 987.355644] env[62346]: _type = "Task" [ 987.355644] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.364813] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': task-4891660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.866628] env[62346]: DEBUG oslo_vmware.exceptions [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 987.866911] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.867486] env[62346]: ERROR nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 987.867486] env[62346]: Faults: ['InvalidArgument'] [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Traceback (most recent call last): [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] yield resources [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] self.driver.spawn(context, instance, image_meta, [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] self._vmops.spawn(context, instance, image_meta, injected_files, [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 987.867486] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] self._fetch_image_if_missing(context, vi) [ 987.867486] 
env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] image_cache(vi, tmp_image_ds_loc) [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] vm_util.copy_virtual_disk( [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] session._wait_for_task(vmdk_copy_task) [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] return self.wait_for_task(task_ref) [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] return evt.wait() [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] result = hub.switch() [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 987.867790] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] return self.greenlet.switch() [ 987.868139] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 987.868139] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] self.f(*self.args, **self.kw) [ 987.868139] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 987.868139] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] raise exceptions.translate_fault(task_info.error) [ 987.868139] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 987.868139] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Faults: ['InvalidArgument'] [ 987.868139] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] [ 987.868139] env[62346]: INFO nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 
tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Terminating instance [ 987.869373] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.869646] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.870369] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 987.870577] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 987.870897] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aebcce5f-ea3c-4009-97d4-58d416cd8e39 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.873246] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9698009-9693-43dd-ab2a-37c1c34459cd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.880252] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 987.880480] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c88e39e8-a88e-46a4-b8af-787c21fe4268 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.882743] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.882916] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 987.883891] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa64339b-1c5c-424e-befe-63746af77942 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.888863] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Waiting for the task: (returnval){ [ 987.888863] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52b80cdf-fb44-3229-c880-9314a3cdac64" [ 987.888863] env[62346]: _type = "Task" [ 987.888863] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.898939] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52b80cdf-fb44-3229-c880-9314a3cdac64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.959056] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 987.959353] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 987.959623] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Deleting the datastore file [datastore2] 1157187b-7051-4921-bd95-9ef3e2d17104 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 987.959896] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcd8d82e-d9f3-461d-be2b-32e3b002c2a9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.966715] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for the task: (returnval){ [ 987.966715] env[62346]: value = "task-4891662" [ 987.966715] env[62346]: _type = "Task" [ 987.966715] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.975349] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': task-4891662, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.399740] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 988.400032] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Creating directory with path [datastore2] vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.400272] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf52f2c6-d743-4bdd-8675-6f851b0d639d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.413702] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Created directory with path [datastore2] vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.413897] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Fetch image to [datastore2] vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 988.414078] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 988.414845] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3c93a8-920e-4997-99e0-c564517670ce {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.422577] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a37b33-4cf8-47fb-999e-45f0a4fd18f8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.432394] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01ea3e2-1391-4f50-9a70-0e3dcbf7c2d2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.464840] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-41fd2e2a-128b-4f98-881d-c71dc13eea50 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.478205] env[62346]: DEBUG oslo_vmware.api [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': task-4891662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069463} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.478524] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d6266f30-7224-47ff-83c5-a5009c71c788 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.480884] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.481098] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 988.481279] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 988.481460] env[62346]: INFO nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Took 0.61 seconds to destroy the instance on the hypervisor. 
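The records in this stretch all follow the same oslo.vmware invoke-and-poll pattern: Nova invokes an asynchronous vSphere "*_Task" method (each "Invoking <Method> with opID=oslo.vmware-..." line), gets back a task reference such as "task-4891662", then blocks in wait_for_task while _poll_task logs "progress is N%" until the task finishes or fails. A minimal sketch of that pattern follows, assuming an installed oslo.vmware and a reachable vCenter; every hostname, credential, path, and moref below is a placeholder for illustration, not a value taken from this log:

    from oslo_vmware import api

    # Placeholder session parameters; the retry count and poll interval
    # control how often _poll_task re-checks the task state.
    session = api.VMwareAPISession(
        'vc.example.org',        # placeholder vCenter host
        'user', 'secret',        # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)

    # Placeholder: a Datacenter moref, normally obtained from an
    # inventory query before file operations.
    dc_ref = None

    # Start an asynchronous vSphere method. This returns a Task
    # reference (e.g. "task-4891662") immediately; it corresponds to
    # the "Invoking FileManager.DeleteDatastoreFile_Task" records above.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] some/path.vmdk',  # placeholder datastore path
        datacenter=dc_ref)

    # Poll until vCenter reports completion. On failure the task's
    # error is translated into an oslo_vmware exception and raised.
    session.wait_for_task(task)

This is also the path on which the failure earlier in the section surfaces: in the CopyVirtualDisk_Task traceback, _poll_task raises exceptions.translate_fault(task_info.error), which arrives in Nova's spawn path as the VimFaultException "A specified parameter was not correct: fileType".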
[ 988.484143] env[62346]: DEBUG nova.compute.claims [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 988.484354] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.484576] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.515391] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 988.576523] env[62346]: DEBUG oslo_vmware.rw_handles [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 988.637330] env[62346]: DEBUG oslo_vmware.rw_handles [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 988.637568] env[62346]: DEBUG oslo_vmware.rw_handles [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 989.018735] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb1bf4c-044b-4518-99b5-e58624f86e6e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.027457] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7624c985-6346-4fec-a748-06bc4a7b2776 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.059424] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e692da-def9-4aab-a838-b5c8867d5731 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.067767] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d595d37c-dc8a-4218-9431-2d9da3f15f10 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.083698] env[62346]: DEBUG nova.compute.provider_tree [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.093948] env[62346]: DEBUG nova.scheduler.client.report [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 989.109986] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.625s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.110479] env[62346]: ERROR nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.110479] env[62346]: Faults: ['InvalidArgument'] [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Traceback (most recent call last): [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 
1157187b-7051-4921-bd95-9ef3e2d17104] self.driver.spawn(context, instance, image_meta, [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] self._vmops.spawn(context, instance, image_meta, injected_files, [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] self._fetch_image_if_missing(context, vi) [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] image_cache(vi, tmp_image_ds_loc) [ 989.110479] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] vm_util.copy_virtual_disk( [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] session._wait_for_task(vmdk_copy_task) [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] return self.wait_for_task(task_ref) [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] return evt.wait() [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] result = hub.switch() [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] return self.greenlet.switch() [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 989.110894] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] self.f(*self.args, **self.kw) [ 989.111220] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 989.111220] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] raise exceptions.translate_fault(task_info.error) [ 989.111220] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.111220] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Faults: ['InvalidArgument'] [ 989.111220] env[62346]: ERROR nova.compute.manager [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] [ 989.111220] env[62346]: DEBUG nova.compute.utils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 989.112795] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Build of instance 1157187b-7051-4921-bd95-9ef3e2d17104 was re-scheduled: A specified parameter was not correct: fileType [ 989.112795] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 989.113182] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 989.113356] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 989.113504] env[62346]: DEBUG nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 989.113665] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 989.556442] env[62346]: DEBUG nova.network.neutron [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.571908] env[62346]: INFO nova.compute.manager [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Took 0.46 seconds to deallocate network for instance. [ 989.694871] env[62346]: INFO nova.scheduler.client.report [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Deleted allocations for instance 1157187b-7051-4921-bd95-9ef3e2d17104 [ 989.719047] env[62346]: DEBUG oslo_concurrency.lockutils [None req-353cebf9-6002-4a19-940b-b667edf73fab tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "1157187b-7051-4921-bd95-9ef3e2d17104" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 440.328s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.719047] env[62346]: DEBUG oslo_concurrency.lockutils [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "1157187b-7051-4921-bd95-9ef3e2d17104" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 240.679s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.719047] env[62346]: DEBUG oslo_concurrency.lockutils [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "1157187b-7051-4921-bd95-9ef3e2d17104-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.719368] env[62346]: DEBUG oslo_concurrency.lockutils [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "1157187b-7051-4921-bd95-9ef3e2d17104-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.719368] env[62346]: DEBUG oslo_concurrency.lockutils [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "1157187b-7051-4921-bd95-9ef3e2d17104-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.722975] env[62346]: INFO nova.compute.manager [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Terminating instance [ 989.729310] env[62346]: DEBUG nova.compute.manager [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 989.730021] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 989.730440] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72957d97-6291-4f11-8d8a-ae657f6cb0d1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.735030] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 989.745329] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1aa580-f3ed-44ff-bde9-3a0b98753aa6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.777899] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1157187b-7051-4921-bd95-9ef3e2d17104 could not be found. [ 989.778154] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 989.778347] env[62346]: INFO nova.compute.manager [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 989.778637] env[62346]: DEBUG oslo.service.loopingcall [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.778875] env[62346]: DEBUG nova.compute.manager [-] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 989.778947] env[62346]: DEBUG nova.network.neutron [-] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 989.800110] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.800375] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.801943] env[62346]: INFO nova.compute.claims [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.815635] env[62346]: DEBUG nova.network.neutron [-] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.835749] env[62346]: INFO nova.compute.manager [-] [instance: 1157187b-7051-4921-bd95-9ef3e2d17104] Took 0.06 seconds to deallocate network for instance. 
[ 989.937431] env[62346]: DEBUG oslo_concurrency.lockutils [None req-981458ce-4d3d-4ed8-aa17-7cc83ea4bbfd tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "1157187b-7051-4921-bd95-9ef3e2d17104" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.219s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.284725] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5831d587-43c4-4c24-bf73-ab0536370062 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.293766] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231017dc-9875-4e19-8e74-8d1cb86c26d6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.325453] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a51ee43-c2c8-462e-bbbc-262dc5a0a3c6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.334413] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62da53f7-7346-4312-a0bb-b764a028e0fc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.349183] env[62346]: DEBUG nova.compute.provider_tree [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.358371] env[62346]: DEBUG nova.scheduler.client.report [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 990.377681] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.577s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.378190] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 990.414526] env[62346]: DEBUG nova.compute.utils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.415882] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 990.416060] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 990.436029] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 990.496280] env[62346]: DEBUG nova.policy [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d43e25d1edd4053a15a8027cbd8529f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8317b2c0c98049fe8044a0edb4bca89c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 990.504430] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 990.530570] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 990.531165] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 990.531492] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.535018] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 990.535018] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.535018] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 990.535018] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 990.535018] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 990.535388] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 990.535388] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 990.535388] env[62346]: DEBUG nova.virt.hardware [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.535388] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7c770f-7bb6-4f23-9a09-fe7f54e68333 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.545178] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82951b90-c1cf-41a7-a5c6-72522dac2cf4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.983716] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Successfully created port: 9f9909e1-a900-4130-bab0-7fa443008565 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.756428] env[62346]: DEBUG nova.compute.manager [req-dbed999a-9861-4aa5-b7a9-a8a228d2697e req-2cebe8e7-af38-4b9f-96f4-9dc1a3cfdaf3 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Received event network-vif-plugged-9f9909e1-a900-4130-bab0-7fa443008565 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 991.756716] env[62346]: DEBUG oslo_concurrency.lockutils [req-dbed999a-9861-4aa5-b7a9-a8a228d2697e req-2cebe8e7-af38-4b9f-96f4-9dc1a3cfdaf3 service nova] Acquiring lock "4d8eeb53-06e4-423f-8719-10f5283175b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.756974] env[62346]: DEBUG oslo_concurrency.lockutils [req-dbed999a-9861-4aa5-b7a9-a8a228d2697e req-2cebe8e7-af38-4b9f-96f4-9dc1a3cfdaf3 service nova] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.757193] env[62346]: DEBUG oslo_concurrency.lockutils [req-dbed999a-9861-4aa5-b7a9-a8a228d2697e req-2cebe8e7-af38-4b9f-96f4-9dc1a3cfdaf3 service nova] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.757399] env[62346]: DEBUG nova.compute.manager [req-dbed999a-9861-4aa5-b7a9-a8a228d2697e req-2cebe8e7-af38-4b9f-96f4-9dc1a3cfdaf3 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] No waiting events found dispatching network-vif-plugged-9f9909e1-a900-4130-bab0-7fa443008565 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 991.757644] env[62346]: WARNING nova.compute.manager [req-dbed999a-9861-4aa5-b7a9-a8a228d2697e req-2cebe8e7-af38-4b9f-96f4-9dc1a3cfdaf3 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Received unexpected event network-vif-plugged-9f9909e1-a900-4130-bab0-7fa443008565 for instance with vm_state building and task_state spawning. [ 991.893617] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Successfully updated port: 9f9909e1-a900-4130-bab0-7fa443008565 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 991.907252] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "refresh_cache-4d8eeb53-06e4-423f-8719-10f5283175b4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.907437] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired lock "refresh_cache-4d8eeb53-06e4-423f-8719-10f5283175b4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.907541] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 991.958692] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 992.175298] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Updating instance_info_cache with network_info: [{"id": "9f9909e1-a900-4130-bab0-7fa443008565", "address": "fa:16:3e:3a:cf:0a", "network": {"id": "2779d93f-257d-463e-b8de-ec5bc3180dba", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-521829042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8317b2c0c98049fe8044a0edb4bca89c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f9909e1-a9", "ovs_interfaceid": "9f9909e1-a900-4130-bab0-7fa443008565", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.187195] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Releasing lock "refresh_cache-4d8eeb53-06e4-423f-8719-10f5283175b4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.187587] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Instance network_info: |[{"id": "9f9909e1-a900-4130-bab0-7fa443008565", "address": "fa:16:3e:3a:cf:0a", "network": {"id": "2779d93f-257d-463e-b8de-ec5bc3180dba", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-521829042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8317b2c0c98049fe8044a0edb4bca89c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f9909e1-a9", "ovs_interfaceid": "9f9909e1-a900-4130-bab0-7fa443008565", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 992.188375] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:cf:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f9909e1-a900-4130-bab0-7fa443008565', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.195946] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating folder: Project (8317b2c0c98049fe8044a0edb4bca89c). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 992.196531] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc4f99a5-8ec8-4183-91fa-3ef13faf638f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.207124] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Created folder: Project (8317b2c0c98049fe8044a0edb4bca89c) in parent group-v953204. [ 992.207344] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating folder: Instances. Parent ref: group-v953263. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 992.207644] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78761bc3-3ea5-48b3-bdb2-69526d7ff1ff {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.217791] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Created folder: Instances in parent group-v953263. [ 992.218066] env[62346]: DEBUG oslo.service.loopingcall [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.218233] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 992.218440] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-885a0eaa-9ffe-4da9-831e-47bcb01fec98 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.238964] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.238964] env[62346]: value = "task-4891665" [ 992.238964] env[62346]: _type = "Task" [ 992.238964] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.248893] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891665, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.749909] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891665, 'name': CreateVM_Task, 'duration_secs': 0.315343} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.750168] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 992.750962] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.751142] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.752327] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 992.752327] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb212f61-859a-4fde-985a-504945cf2bc2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.757317] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 992.757317] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]527f643d-76d1-fb95-73f2-e759df66ff8d" [ 992.757317] env[62346]: _type = "Task" [ 992.757317] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.767912] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]527f643d-76d1-fb95-73f2-e759df66ff8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.838727] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.268447] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.268447] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.268862] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.782496] env[62346]: DEBUG nova.compute.manager [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Received event network-changed-9f9909e1-a900-4130-bab0-7fa443008565 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 993.782718] env[62346]: DEBUG nova.compute.manager [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Refreshing instance network info cache due to event network-changed-9f9909e1-a900-4130-bab0-7fa443008565. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 993.782928] env[62346]: DEBUG oslo_concurrency.lockutils [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] Acquiring lock "refresh_cache-4d8eeb53-06e4-423f-8719-10f5283175b4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.783247] env[62346]: DEBUG oslo_concurrency.lockutils [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] Acquired lock "refresh_cache-4d8eeb53-06e4-423f-8719-10f5283175b4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.783247] env[62346]: DEBUG nova.network.neutron [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Refreshing network info cache for port 9f9909e1-a900-4130-bab0-7fa443008565 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 994.064788] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "4d8eeb53-06e4-423f-8719-10f5283175b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.392114] env[62346]: DEBUG nova.network.neutron [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Updated VIF entry in instance network info cache for port 9f9909e1-a900-4130-bab0-7fa443008565. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 994.392649] env[62346]: DEBUG nova.network.neutron [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Updating instance_info_cache with network_info: [{"id": "9f9909e1-a900-4130-bab0-7fa443008565", "address": "fa:16:3e:3a:cf:0a", "network": {"id": "2779d93f-257d-463e-b8de-ec5bc3180dba", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-521829042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8317b2c0c98049fe8044a0edb4bca89c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f9909e1-a9", "ovs_interfaceid": "9f9909e1-a900-4130-bab0-7fa443008565", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.403949] env[62346]: DEBUG oslo_concurrency.lockutils [req-b5528b6e-1839-4982-8161-d1dcb86d5f1c req-8001859d-4f78-4565-b14d-8aab35419d44 service nova] Releasing lock "refresh_cache-4d8eeb53-06e4-423f-8719-10f5283175b4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.594013] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.594374] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.173881] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "da750b9f-b4d7-4c55-acfc-289222af9067" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.174261] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock 
"da750b9f-b4d7-4c55-acfc-289222af9067" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.224467] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "0da3e07d-07a7-4c1a-a3aa-ae4973311d80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.224840] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "0da3e07d-07a7-4c1a-a3aa-ae4973311d80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.121763] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e51964d6-2612-4e81-b97f-7216f3e20d97 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "be701e07-33b0-48de-962b-5051d1c2e2ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.122184] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e51964d6-2612-4e81-b97f-7216f3e20d97 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "be701e07-33b0-48de-962b-5051d1c2e2ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.907850] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1340031c-c495-43e3-a842-0ade251347a2 tempest-ServerAddressesNegativeTestJSON-1574771368 tempest-ServerAddressesNegativeTestJSON-1574771368-project-member] Acquiring lock "dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.907850] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1340031c-c495-43e3-a842-0ade251347a2 tempest-ServerAddressesNegativeTestJSON-1574771368 tempest-ServerAddressesNegativeTestJSON-1574771368-project-member] Lock "dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.685407] env[62346]: DEBUG oslo_concurrency.lockutils [None req-daacd5df-44cb-44c4-8ef2-2b0301aa1846 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Acquiring lock "0e30d5a5-0c28-411a-b0fd-8385d86323c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.685684] env[62346]: DEBUG oslo_concurrency.lockutils [None req-daacd5df-44cb-44c4-8ef2-2b0301aa1846 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Lock "0e30d5a5-0c28-411a-b0fd-8385d86323c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.522196] env[62346]: WARNING oslo_vmware.rw_handles [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1036.522196] env[62346]: ERROR oslo_vmware.rw_handles [ 1036.522196] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1036.524735] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1036.525601] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Copying Virtual Disk [datastore2] vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/d4ba6285-bd00-4cb9-9eb8-230e28408d41/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1036.525601] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0ef2963-f4b2-478f-a398-57c0f799b999 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.534892] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Waiting for the task: (returnval){ [ 1036.534892] env[62346]: value = "task-4891666" [ 1036.534892] env[62346]: _type = "Task" [ 1036.534892] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.548527] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Task: {'id': task-4891666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.051025] env[62346]: DEBUG oslo_vmware.exceptions [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1037.051025] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.051025] env[62346]: ERROR nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1037.051025] env[62346]: Faults: ['InvalidArgument'] [ 1037.051025] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Traceback (most recent call last): [ 1037.051025] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1037.051025] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] yield resources [ 1037.051025] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self.driver.spawn(context, instance, image_meta, [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self._vmops.spawn(context, instance, image_meta, 
injected_files, [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self._fetch_image_if_missing(context, vi) [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] image_cache(vi, tmp_image_ds_loc) [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] vm_util.copy_virtual_disk( [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] session._wait_for_task(vmdk_copy_task) [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1037.051614] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] return self.wait_for_task(task_ref) [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] return evt.wait() [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] result = hub.switch() [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] return self.greenlet.switch() [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self.f(*self.args, **self.kw) [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] raise exceptions.translate_fault(task_info.error) [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1037.052038] env[62346]: 
ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Faults: ['InvalidArgument'] [ 1037.052038] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] [ 1037.052443] env[62346]: INFO nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Terminating instance [ 1037.054013] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.054013] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.054013] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1037.054013] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1037.054013] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12235f85-8d19-409a-b70b-05f0af65e582 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.056327] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed9e8ac-f65c-4f5d-86d3-b02bc00e9c10 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.063964] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1037.064243] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ea388e5-09b2-43a5-b0a8-487806e36f46 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.066708] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.066884] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1037.067904] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16b9d150-2406-47af-8d53-e31cc48d5466 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.073140] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Waiting for the task: (returnval){ [ 1037.073140] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ebc335-d3fc-00da-9017-18c842047f1e" [ 1037.073140] env[62346]: _type = "Task" [ 1037.073140] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.081297] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ebc335-d3fc-00da-9017-18c842047f1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.137454] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1037.137683] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1037.138030] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Deleting the datastore file [datastore2] a040a266-a77e-4ef4-ac34-df4781f2a757 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.138180] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7aa9beee-2d52-41e6-af2a-3289652a24d5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.148036] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Waiting for the task: (returnval){ [ 1037.148036] env[62346]: value = "task-4891668" [ 1037.148036] env[62346]: _type = "Task" [ 1037.148036] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.156874] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Task: {'id': task-4891668, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.219642] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.233927] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.234304] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.236925] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.236925] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1037.236925] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f08f437-d506-4139-bf65-56db3acc715c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.248433] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39236f37-18c7-403b-b249-d32300c4feb3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.262852] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc8765c-89ce-4e16-9b16-8c93e7ee2cd9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.270990] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65600310-52dc-4ed4-8b49-378a76069789 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.305737] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180560MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) 
_report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1037.305926] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.306181] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.395541] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance a040a266-a77e-4ef4-ac34-df4781f2a757 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.395737] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.395803] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.395920] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.396054] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.396174] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.396288] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.398024] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.398024] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.398024] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1037.409046] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c3c27528-211f-4c7b-ad25-fb6f2d8c7faf has been deleted (perhaps locally). Deleting allocations that remained for this instance against this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1753}} [ 1037.425715] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cannot delete allocation for c3c27528-211f-4c7b-ad25-fb6f2d8c7faf consumer in placement as consumer does not exist {{(pid=62346) delete_allocation_for_instance /opt/stack/nova/nova/scheduler/client/report.py:2199}} [ 1037.440011] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b5224ca6-5c7c-4631-bcc1-199ceaf1dee2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.456458] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.471410] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.487380] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.502723] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 723d75ac-364c-4e21-a664-879e99839c16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.518656] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 67812f8f-9aee-42ac-b22e-5761240cbb7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.529512] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29516cd3-7d37-40d0-9f8a-9215ae69938b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.545311] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.566722] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.581300] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.587221] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1037.587221] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Creating directory with path [datastore2] vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.587221] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4b3dff0-c3c9-4476-90f4-a4e1a37ef8cc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.593603] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0da3e07d-07a7-4c1a-a3aa-ae4973311d80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.602447] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Created directory with path [datastore2] vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.602902] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Fetch image to [datastore2] vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1037.602902] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1037.603657] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5290fb76-ded6-4f82-af2f-ef8b8690ef1f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.613223] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None 
None] Instance be701e07-33b0-48de-962b-5051d1c2e2ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.617541] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c4f2bd-8328-4bed-a01e-5bc00e7ebbca {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.626084] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.628532] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7342ce08-ad10-4674-bb4f-0e6b08841427 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.640483] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0e30d5a5-0c28-411a-b0fd-8385d86323c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1037.640483] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1037.640483] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '29', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_20ec99e588e349d6b37d8222d8e5019c': '1', 'io_workload': '10', 'num_proj_e556bdf0bd9c4ea9a15e33b2ecde4f11': '1', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1', 'num_proj_da4db2adf7dd413caf5d93fedf5af2dc': '1', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1', 'num_proj_04179eaf0efd4e43a9d4eb1445ffc270': '1', 'num_proj_a625cf2514b140fcb029253fd85f6ee8': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1037.677238] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b416fc72-2675-4f59-98e0-f1ec6ad4f749 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.691088] env[62346]: DEBUG oslo_vmware.api [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Task: {'id': task-4891668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086151} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.691088] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.693378] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1037.693378] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1037.693378] env[62346]: INFO nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Took 0.64 seconds to destroy the instance on the hypervisor. 
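The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: submit a vCenter task, then poll it ("Waiting for the task ... to complete", "progress is 0%.", "completed successfully"), translating any task fault into a Python exception on failure, which is exactly how the InvalidArgument fault surfaced as VimFaultException ("A specified parameter was not correct: fileType") in the traceback earlier. The following is a minimal illustrative sketch of that polling loop, not oslo.vmware's actual API: FakeTask, poll() and TaskFailed are hypothetical stand-ins.

import time


class TaskFailed(Exception):
    """Stand-in for a fault translated off a failed task (cf. VimFaultException)."""


class FakeTask:
    """Hypothetical task handle that reports success after a few polls."""

    def __init__(self, task_id, polls_to_finish=3):
        self.task_id = task_id
        self._polls = 0
        self._polls_to_finish = polls_to_finish

    def poll(self):
        """Return (state, progress_pct); state is 'running', 'success' or 'error'."""
        self._polls += 1
        if self._polls < self._polls_to_finish:
            return "running", int(100 * self._polls / self._polls_to_finish)
        return "success", 100


def wait_for_task(task, interval=0.2, timeout=60.0):
    """Poll a task until it succeeds; raise on error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = task.poll()
        print(f"Task: {{'id': {task.task_id!r}}} progress is {progress}%.")
        if state == "success":
            return
        if state == "error":
            # The real loop translates the vCenter fault at this point,
            # e.g. into "A specified parameter was not correct: fileType".
            raise TaskFailed(task.task_id)
        time.sleep(interval)
    raise TimeoutError(f"task {task.task_id} did not complete within {timeout}s")


wait_for_task(FakeTask("task-4891668"))

Under these assumptions the loop prints rising progress lines and returns once the task reports success, mirroring the 0% -> "completed successfully" sequence logged for task-4891668 above.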
[ 1037.693648] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bc1a3c69-7a5a-4013-b31f-4235955f69d2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.696279] env[62346]: DEBUG nova.compute.claims [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1037.698042] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.727242] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1037.809614] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1037.873179] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1037.873179] env[62346]: DEBUG oslo_vmware.rw_handles [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1038.125183] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b517c2-2158-4d9b-9b94-494261222358 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.133884] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d08b027-4589-46a8-aa3c-7040994d722c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.166387] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f761f491-b56f-4d7e-95d4-190a29f8f42c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.177313] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9ea441-caae-4004-b32c-4a7cd2348a2f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.192232] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.200504] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1038.215445] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1038.215585] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.909s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.215841] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.519s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.471834] env[62346]: DEBUG oslo_concurrency.lockutils [None req-02c4ecd3-7b62-4f9b-9417-f974dd7e1ffc tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Acquiring lock "793e37e0-7e21-49f9-aaf2-44a42af1f4b1" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.472109] env[62346]: DEBUG oslo_concurrency.lockutils [None req-02c4ecd3-7b62-4f9b-9417-f974dd7e1ffc tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "793e37e0-7e21-49f9-aaf2-44a42af1f4b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.675105] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f710b39-07d3-4c59-b5ca-f3fc0a101293 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.683296] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d70472-9183-4545-bdaf-70dbbe6844e5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.714557] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a79425-8841-44d9-b7ca-fbe8503bcf50 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.724929] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859db1cc-573f-4b63-9c25-016d797a5073 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.742698] env[62346]: DEBUG nova.compute.provider_tree [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.753421] env[62346]: DEBUG nova.scheduler.client.report [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1038.772039] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.554s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.772039] env[62346]: ERROR nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 
tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1038.772039] env[62346]: Faults: ['InvalidArgument'] [ 1038.772039] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Traceback (most recent call last): [ 1038.772039] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1038.772039] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self.driver.spawn(context, instance, image_meta, [ 1038.772039] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1038.772039] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1038.772039] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1038.772039] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self._fetch_image_if_missing(context, vi) [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] image_cache(vi, tmp_image_ds_loc) [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] vm_util.copy_virtual_disk( [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] session._wait_for_task(vmdk_copy_task) [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] return self.wait_for_task(task_ref) [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] return evt.wait() [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] result = hub.switch() [ 1038.772818] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] return self.greenlet.switch() [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] self.f(*self.args, **self.kw) [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] raise exceptions.translate_fault(task_info.error) [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Faults: ['InvalidArgument'] [ 1038.773267] env[62346]: ERROR nova.compute.manager [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] [ 1038.773267] env[62346]: DEBUG nova.compute.utils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1038.773267] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Build of instance a040a266-a77e-4ef4-ac34-df4781f2a757 was re-scheduled: A specified parameter was not correct: fileType [ 1038.773675] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1038.773675] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1038.773675] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1038.773675] env[62346]: DEBUG nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1038.773800] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1039.259920] env[62346]: DEBUG nova.network.neutron [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.280776] env[62346]: INFO nova.compute.manager [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Took 0.51 seconds to deallocate network for instance. [ 1039.336089] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dd689a49-1ef1-40f8-8e8f-020bf16d7048 tempest-ServersNegativeTestMultiTenantJSON-1760784361 tempest-ServersNegativeTestMultiTenantJSON-1760784361-project-member] Acquiring lock "55fe8916-2b2f-4912-ae6b-4a06db60fedc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.336089] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dd689a49-1ef1-40f8-8e8f-020bf16d7048 tempest-ServersNegativeTestMultiTenantJSON-1760784361 tempest-ServersNegativeTestMultiTenantJSON-1760784361-project-member] Lock "55fe8916-2b2f-4912-ae6b-4a06db60fedc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.404840] env[62346]: INFO nova.scheduler.client.report [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Deleted allocations for instance a040a266-a77e-4ef4-ac34-df4781f2a757 [ 1039.446576] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d75c05eb-3d12-4315-b9b6-520aab167f2f tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 473.042s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.447460] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 
tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 274.817s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.447460] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Acquiring lock "a040a266-a77e-4ef4-ac34-df4781f2a757-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.447576] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.447669] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.454124] env[62346]: INFO nova.compute.manager [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Terminating instance [ 1039.459992] env[62346]: DEBUG nova.compute.manager [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1039.459992] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1039.460203] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfcac563-4ec0-4a05-a709-cfbc14b96e62 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.465227] env[62346]: DEBUG nova.compute.manager [None req-dcc006c1-13d0-4ff0-947c-14609f9ed98b tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] [instance: 9feb7773-2b61-464f-878b-0a8ee21a22c0] Starting instance... 
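Annotation: the acquire/waited/held lines above are the standard oslo.concurrency choreography: terminate holds the per-instance lock (named by the instance UUID), and inside it the event table is cleared under a narrower "<uuid>-events" lock. A minimal sketch using the real lockutils.synchronized decorator, assuming in-process (non-external) locks; the function bodies are illustrative stand-ins for the compute manager's.

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = "a040a266-a77e-4ef4-ac34-df4781f2a757"

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # While the per-instance lock is held (the long "waited 274.817s"
    # above is time spent behind the build holding this same name),
    # queued external events are dropped under the "-events" lock.
    @lockutils.synchronized(INSTANCE_UUID + "-events")
    def _clear_events():
        return {}  # discard any pending events for this instance
    _clear_events()
    # ... destroy the instance on the hypervisor ...

do_terminate_instance()
```

The nesting order (instance lock first, events lock second) is what keeps event delivery from racing the teardown.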
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1039.476496] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7acc06e-58a7-4a08-84ce-273c4ae07998 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.497768] env[62346]: DEBUG nova.compute.manager [None req-dcc006c1-13d0-4ff0-947c-14609f9ed98b tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] [instance: 9feb7773-2b61-464f-878b-0a8ee21a22c0] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1039.513021] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a040a266-a77e-4ef4-ac34-df4781f2a757 could not be found. [ 1039.513021] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1039.513021] env[62346]: INFO nova.compute.manager [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1039.513021] env[62346]: DEBUG oslo.service.loopingcall [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.513021] env[62346]: DEBUG nova.compute.manager [-] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1039.513355] env[62346]: DEBUG nova.network.neutron [-] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1039.527231] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dcc006c1-13d0-4ff0-947c-14609f9ed98b tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] Lock "9feb7773-2b61-464f-878b-0a8ee21a22c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.298s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.541653] env[62346]: DEBUG nova.compute.manager [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: c3c4fb8b-3897-4c85-b40c-710dc4d1fb16] Starting instance... 
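Annotation: the WARNING/"Instance destroyed" pair above shows a destroy path that tolerates a VM that never materialized on the backend: InstanceNotFound is caught and treated as already-deleted so network and allocation cleanup still runs. A minimal sketch of that shape; the driver object and its two helpers are hypothetical.

```python
class InstanceNotFound(Exception):
    pass

def destroy(driver, instance_uuid):
    try:
        vm_ref = driver.find_vm_by_uuid(instance_uuid)   # hypothetical helper
        driver.unregister_and_delete(vm_ref)             # hypothetical helper
    except InstanceNotFound:
        # The VM is gone on the vCenter side (here, the failed build never
        # created it); warn and fall through so deallocation proceeds.
        print("Instance %s does not exist on backend" % instance_uuid)
```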
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1039.546125] env[62346]: DEBUG nova.network.neutron [-] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.558987] env[62346]: INFO nova.compute.manager [-] [instance: a040a266-a77e-4ef4-ac34-df4781f2a757] Took 0.05 seconds to deallocate network for instance. [ 1039.578723] env[62346]: DEBUG nova.compute.manager [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: c3c4fb8b-3897-4c85-b40c-710dc4d1fb16] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1039.603133] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "c3c4fb8b-3897-4c85-b40c-710dc4d1fb16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.927s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.616581] env[62346]: DEBUG nova.compute.manager [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: 92241377-06e3-41e1-bae5-718f1ae5908b] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1039.651775] env[62346]: DEBUG nova.compute.manager [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: 92241377-06e3-41e1-bae5-718f1ae5908b] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1039.666951] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8e06140-d0e6-4de3-a885-2110c35ed266 tempest-VolumesAssistedSnapshotsTest-2079667365 tempest-VolumesAssistedSnapshotsTest-2079667365-project-member] Lock "a040a266-a77e-4ef4-ac34-df4781f2a757" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.220s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.710919] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e924c9e9-bb96-4e11-9c06-9321355289d7 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "92241377-06e3-41e1-bae5-718f1ae5908b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.998s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.721559] env[62346]: DEBUG nova.compute.manager [None req-523e7672-eed8-4dde-8e5d-6b85c3289a9f tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] [instance: 69db1596-7767-40cc-9872-45574c4f681e] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1039.747347] env[62346]: DEBUG nova.compute.manager [None req-523e7672-eed8-4dde-8e5d-6b85c3289a9f tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] [instance: 69db1596-7767-40cc-9872-45574c4f681e] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1039.773214] env[62346]: DEBUG oslo_concurrency.lockutils [None req-523e7672-eed8-4dde-8e5d-6b85c3289a9f tempest-ServerRescueNegativeTestJSON-1740735354 tempest-ServerRescueNegativeTestJSON-1740735354-project-member] Lock "69db1596-7767-40cc-9872-45574c4f681e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.348s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.783029] env[62346]: DEBUG nova.compute.manager [None req-94660edd-3bc7-4245-9207-432940d77db4 tempest-ServerMetadataTestJSON-951334850 tempest-ServerMetadataTestJSON-951334850-project-member] [instance: 0889843e-cf4a-4ab3-9702-a2599fac93ff] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1039.810880] env[62346]: DEBUG nova.compute.manager [None req-94660edd-3bc7-4245-9207-432940d77db4 tempest-ServerMetadataTestJSON-951334850 tempest-ServerMetadataTestJSON-951334850-project-member] [instance: 0889843e-cf4a-4ab3-9702-a2599fac93ff] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1039.833100] env[62346]: DEBUG oslo_concurrency.lockutils [None req-94660edd-3bc7-4245-9207-432940d77db4 tempest-ServerMetadataTestJSON-951334850 tempest-ServerMetadataTestJSON-951334850-project-member] Lock "0889843e-cf4a-4ab3-9702-a2599fac93ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.273s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.843607] env[62346]: DEBUG nova.compute.manager [None req-b8f69016-ad1f-4512-981b-91171ed5c403 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 6f96e666-ac1d-48a1-b663-86f1bb9b64d6] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1039.868661] env[62346]: DEBUG nova.compute.manager [None req-b8f69016-ad1f-4512-981b-91171ed5c403 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 6f96e666-ac1d-48a1-b663-86f1bb9b64d6] Instance disappeared before build. 
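Annotation: the long run of "Instance disappeared before build." entries above (and below) all come from the same early exit: by the time each queued build finally acquires its instance lock, tempest has already deleted the instance, so the build returns without claiming anything. That is why the lock lines show hundreds of seconds "waited" but no work "held". A minimal sketch of the check, with fetch_instance as a hypothetical DB lookup standing in for Nova's refresh:

```python
def do_build_and_run_instance(fetch_instance, instance_uuid):
    instance = fetch_instance(instance_uuid)
    if instance is None or instance.get("deleted"):
        # Nothing to build; release the lock immediately.
        return "disappeared"
    # ... claim resources, build networks, spawn ...
    return "built"
```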
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1039.906294] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b8f69016-ad1f-4512-981b-91171ed5c403 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "6f96e666-ac1d-48a1-b663-86f1bb9b64d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.016s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.915333] env[62346]: DEBUG nova.compute.manager [None req-d97360ba-a209-4894-afb7-36665ef1afde tempest-ServersTestFqdnHostnames-80151842 tempest-ServersTestFqdnHostnames-80151842-project-member] [instance: 823c0133-92a4-4f86-9df5-7fdf57ccc9f5] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1039.939509] env[62346]: DEBUG nova.compute.manager [None req-d97360ba-a209-4894-afb7-36665ef1afde tempest-ServersTestFqdnHostnames-80151842 tempest-ServersTestFqdnHostnames-80151842-project-member] [instance: 823c0133-92a4-4f86-9df5-7fdf57ccc9f5] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1039.979883] env[62346]: DEBUG oslo_concurrency.lockutils [None req-d97360ba-a209-4894-afb7-36665ef1afde tempest-ServersTestFqdnHostnames-80151842 tempest-ServersTestFqdnHostnames-80151842-project-member] Lock "823c0133-92a4-4f86-9df5-7fdf57ccc9f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.141s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.988735] env[62346]: DEBUG nova.compute.manager [None req-688d0e48-23e1-46c3-a79b-f3bd2d67257b tempest-ServersTestJSON-1916021883 tempest-ServersTestJSON-1916021883-project-member] [instance: 7f1f9951-6292-4b19-8567-16495b9f90cd] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.011921] env[62346]: DEBUG nova.compute.manager [None req-688d0e48-23e1-46c3-a79b-f3bd2d67257b tempest-ServersTestJSON-1916021883 tempest-ServersTestJSON-1916021883-project-member] [instance: 7f1f9951-6292-4b19-8567-16495b9f90cd] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.032200] env[62346]: DEBUG oslo_concurrency.lockutils [None req-688d0e48-23e1-46c3-a79b-f3bd2d67257b tempest-ServersTestJSON-1916021883 tempest-ServersTestJSON-1916021883-project-member] Lock "7f1f9951-6292-4b19-8567-16495b9f90cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.686s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.045677] env[62346]: DEBUG nova.compute.manager [None req-1346ee98-66e6-4967-bfb9-ef5923f12753 tempest-ServerActionsTestOtherB-773440793 tempest-ServerActionsTestOtherB-773440793-project-member] [instance: 1ee08a27-8aa5-49b8-abf7-7d69dee25a4d] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.070371] env[62346]: DEBUG nova.compute.manager [None req-1346ee98-66e6-4967-bfb9-ef5923f12753 tempest-ServerActionsTestOtherB-773440793 tempest-ServerActionsTestOtherB-773440793-project-member] [instance: 1ee08a27-8aa5-49b8-abf7-7d69dee25a4d] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.093014] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1346ee98-66e6-4967-bfb9-ef5923f12753 tempest-ServerActionsTestOtherB-773440793 tempest-ServerActionsTestOtherB-773440793-project-member] Lock "1ee08a27-8aa5-49b8-abf7-7d69dee25a4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.057s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.103809] env[62346]: DEBUG nova.compute.manager [None req-3c672a6b-7f00-4ad4-98c8-ccf7ada2f880 tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] [instance: 4b4ad569-a890-4ade-aa72-313f8d87d430] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.127346] env[62346]: DEBUG nova.compute.manager [None req-3c672a6b-7f00-4ad4-98c8-ccf7ada2f880 tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] [instance: 4b4ad569-a890-4ade-aa72-313f8d87d430] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.148429] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3c672a6b-7f00-4ad4-98c8-ccf7ada2f880 tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] Lock "4b4ad569-a890-4ade-aa72-313f8d87d430" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.110s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.157995] env[62346]: DEBUG nova.compute.manager [None req-9c7bb4b6-829f-4df9-a3a8-aef957e7458f tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] [instance: d8fa321b-9703-4b46-b2c3-5889b03a9116] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.184015] env[62346]: DEBUG nova.compute.manager [None req-9c7bb4b6-829f-4df9-a3a8-aef957e7458f tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] [instance: d8fa321b-9703-4b46-b2c3-5889b03a9116] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.208433] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9c7bb4b6-829f-4df9-a3a8-aef957e7458f tempest-ServerShowV247Test-437408272 tempest-ServerShowV247Test-437408272-project-member] Lock "d8fa321b-9703-4b46-b2c3-5889b03a9116" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.012s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.217672] env[62346]: DEBUG nova.compute.manager [None req-07ffd870-8465-4f81-93eb-e5028eb82d69 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] [instance: 40b09c7e-1206-4b4c-a1a6-e556a5af2743] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.242934] env[62346]: DEBUG nova.compute.manager [None req-07ffd870-8465-4f81-93eb-e5028eb82d69 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] [instance: 40b09c7e-1206-4b4c-a1a6-e556a5af2743] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.269743] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07ffd870-8465-4f81-93eb-e5028eb82d69 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Lock "40b09c7e-1206-4b4c-a1a6-e556a5af2743" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.054s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.280156] env[62346]: DEBUG nova.compute.manager [None req-ba18bcbd-7386-486c-9d9a-f2bd47db6909 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: c3c27528-211f-4c7b-ad25-fb6f2d8c7faf] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.306117] env[62346]: DEBUG nova.compute.manager [None req-ba18bcbd-7386-486c-9d9a-f2bd47db6909 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: c3c27528-211f-4c7b-ad25-fb6f2d8c7faf] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.335731] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ba18bcbd-7386-486c-9d9a-f2bd47db6909 tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "c3c27528-211f-4c7b-ad25-fb6f2d8c7faf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.273s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.350967] env[62346]: DEBUG nova.compute.manager [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.409910] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.410206] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.411850] env[62346]: INFO nova.compute.claims [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1040.786614] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4dcf4c6-b627-4cb6-9725-6ed29ade3c65 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.795570] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9342468-4814-4143-91ed-fbfe9f53bdf1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.829336] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74f81cf-8481-4db5-be1e-4efde710a0dd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.837491] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3bf286-dd17-4764-8386-1e6c10baeac2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.851697] env[62346]: DEBUG nova.compute.provider_tree [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.861031] env[62346]: DEBUG nova.scheduler.client.report [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1040.875647] env[62346]: DEBUG 
oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.465s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.876158] env[62346]: DEBUG nova.compute.manager [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1040.878647] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.908337] env[62346]: DEBUG nova.compute.claims [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1040.908534] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.908771] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.236172] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d717d7e-9295-4862-b161-15b82887f4ed {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.243732] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd38aa63-593d-4185-8595-7f9269eab303 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.274363] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a91cb2-3d3e-4df1-abab-febbf62d7fda {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.282104] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9bfd4d-6719-41a1-a8f4-b3e6f08192b2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1041.296435] env[62346]: DEBUG nova.compute.provider_tree [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.305827] env[62346]: DEBUG nova.scheduler.client.report [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.320098] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.411s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.320905] env[62346]: DEBUG nova.compute.utils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Conflict updating instance b5224ca6-5c7c-4631-bcc1-199ceaf1dee2. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1041.322460] env[62346]: DEBUG nova.compute.manager [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Instance disappeared during build. 
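Annotation: the "Conflict updating instance ... Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'}" line above is an optimistic-concurrency failure: the build path saves the instance asserting task_state is still None, but a concurrent delete (the do_terminate_instance request waiting on the same UUID) already set it to 'deleting', so the save raises and the resource claim is aborted. A minimal sketch of that compare-and-set, with a plain dict standing in for the DB row; the expected_task_state kwarg mirrors the shape of Nova's Instance.save() but is reimplemented here.

```python
class UnexpectedTaskStateError(Exception):
    pass

def save_instance(db_row, updates, expected_task_state=None):
    # Only apply the update if the current task_state matches what the
    # caller expected; otherwise another actor won the race.
    if expected_task_state is not None:
        if db_row["task_state"] not in expected_task_state:
            raise UnexpectedTaskStateError(
                "Expected: %s. Actual: %s"
                % (expected_task_state, db_row["task_state"]))
    db_row.update(updates)

row = {"uuid": "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2",
       "task_state": "deleting"}        # the delete raced ahead of the build
try:
    save_instance(row, {"task_state": "spawning"},
                  expected_task_state=[None])
except UnexpectedTaskStateError as exc:
    print("aborting claim:", exc)       # matches the Conflict line above
```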
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}} [ 1041.322630] env[62346]: DEBUG nova.compute.manager [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1041.322840] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "refresh_cache-b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.322982] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquired lock "refresh_cache-b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.323158] env[62346]: DEBUG nova.network.neutron [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1041.357829] env[62346]: DEBUG nova.network.neutron [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1041.649112] env[62346]: DEBUG nova.network.neutron [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.659475] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Releasing lock "refresh_cache-b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.659755] env[62346]: DEBUG nova.compute.manager [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1041.660545] env[62346]: DEBUG nova.compute.manager [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1041.660545] env[62346]: DEBUG nova.network.neutron [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1041.682074] env[62346]: DEBUG nova.network.neutron [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1041.689171] env[62346]: DEBUG nova.network.neutron [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.697162] env[62346]: INFO nova.compute.manager [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Took 0.04 seconds to deallocate network for instance. 
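Annotation: the "Waiting for function ..._deallocate_network_with_retries to return." lines in this log come from wrapping the deallocate call in oslo.service's retry machinery: the wrapped function is re-run on the listed exceptions with a growing sleep, and the caller blocks until it finally returns or exhausts its retries. A minimal runnable sketch using the real loopingcall.RetryDecorator (requires oslo.service installed); the flaky_deallocate body is a stand-in for the neutron call.

```python
from oslo_service import loopingcall

attempts = {"n": 0}

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=5, exceptions=(RuntimeError,))
def flaky_deallocate():
    attempts["n"] += 1
    if attempts["n"] < 2:
        raise RuntimeError("transient neutron failure")  # retried with sleep
    return "deallocated"

print(flaky_deallocate())   # blocks through the retry, then returns
```

Scoping the retries to specific exception types keeps a genuine bug (anything not listed) failing fast instead of being retried.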
[ 1041.766635] env[62346]: INFO nova.scheduler.client.report [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Deleted allocations for instance b5224ca6-5c7c-4631-bcc1-199ceaf1dee2 [ 1041.766635] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3236d5e8-e71b-40d7-a3cc-0918d9b09390 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.089s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.767584] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.889s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.767824] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.768035] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.768229] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.772763] env[62346]: INFO nova.compute.manager [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Terminating instance [ 1041.774485] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquiring lock "refresh_cache-b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.774661] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 
tempest-DeleteServersAdminTestJSON-1384217777-project-member] Acquired lock "refresh_cache-b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.774816] env[62346]: DEBUG nova.network.neutron [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1041.776629] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.808557] env[62346]: DEBUG nova.network.neutron [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1041.844010] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.844424] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.845872] env[62346]: INFO nova.compute.claims [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.911726] env[62346]: DEBUG nova.network.neutron [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.921616] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Releasing lock "refresh_cache-b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.922048] env[62346]: DEBUG nova.compute.manager [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: 
b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1041.922250] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1041.923187] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6787fac5-b4b1-4515-ab42-2b9f3f4468e2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.935215] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccfaab1-8e88-4394-be9a-94df810d9263 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.971180] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b5224ca6-5c7c-4631-bcc1-199ceaf1dee2 could not be found. [ 1041.971434] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1041.971644] env[62346]: INFO nova.compute.manager [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1041.971902] env[62346]: DEBUG oslo.service.loopingcall [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.974771] env[62346]: DEBUG nova.compute.manager [-] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1041.974935] env[62346]: DEBUG nova.network.neutron [-] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1041.994265] env[62346]: DEBUG nova.network.neutron [-] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1042.003791] env[62346]: DEBUG nova.network.neutron [-] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.012982] env[62346]: INFO nova.compute.manager [-] [instance: b5224ca6-5c7c-4631-bcc1-199ceaf1dee2] Took 0.04 seconds to deallocate network for instance. [ 1042.106841] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b2c01dfd-3890-440b-a209-4f5ce502b2a3 tempest-DeleteServersAdminTestJSON-1384217777 tempest-DeleteServersAdminTestJSON-1384217777-project-member] Lock "b5224ca6-5c7c-4631-bcc1-199ceaf1dee2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.339s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.218934] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.219198] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.219428] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1042.220369] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1042.245140] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.245319] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.245452] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.245580] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.245704] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.245823] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.245939] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.246098] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.246230] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.246348] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1042.246494] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
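Annotation: the "Running periodic task ComputeManager._heal_instance_info_cache" lines above come from oslo.service's periodic-task machinery: methods tagged with the periodic_task decorator are collected on a PeriodicTasks subclass and run on their spacing, and this particular task skips instances still in Building (the repeated "Skipping network cache update" lines). A minimal sketch, assuming oslo.service and oslo.config are installed; Manager and its instance list are illustrative.

```python
from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)
        self.instances = [{"uuid": "7c5cd006", "vm_state": "building"}]

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _heal_instance_info_cache(self, context):
        for inst in self.instances:
            if inst["vm_state"] == "building":
                # matches "Skipping network cache update ... Building."
                continue
            # ... refresh the cached network info for inst ...

Manager().run_periodic_tasks(None)   # the service loop calls this on a timer
```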
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1042.247065] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.263284] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b67bf61-78f6-4cf5-b0b5-32159bf4f3ce {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.271860] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa310414-2bc3-4372-b262-223a0a539cfe {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.301861] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b8b4e4-9862-46c2-a0d3-4a25d8749400 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.309727] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed0666c-c9da-46b6-9008-ef2434e300d5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.323938] env[62346]: DEBUG nova.compute.provider_tree [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.334415] env[62346]: DEBUG nova.scheduler.client.report [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.351431] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.507s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.351929] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Start building networks asynchronously for instance. 
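Annotation: "Start building networks asynchronously for instance." above means the neutron allocation is kicked off on a greenthread and only joined when the network info is actually needed, so block-device setup below overlaps the port creation. A minimal sketch of that overlap with eventlet (which Nova's service runs on); allocate_for_instance and the port id reuse are illustrative.

```python
import eventlet

def allocate_for_instance():
    eventlet.sleep(0.1)          # pretend to talk to neutron
    return [{"port_id": "8629d8c2-394f-4273-a962-171b52644a96"}]

gt = eventlet.spawn(allocate_for_instance)   # returns immediately
# ... build block device mappings while the port is being created ...
network_info = gt.wait()                     # join when actually needed
print(network_info)
```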
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1042.386816] env[62346]: DEBUG nova.compute.utils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1042.388342] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1042.388540] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1042.397509] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1042.445647] env[62346]: DEBUG nova.policy [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c601083f0a44da850b33189c701bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abc1ead3f9a9442ca0b85f152f94fe6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1042.464662] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1042.490747] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1042.490992] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1042.491163] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.491349] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1042.491496] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.491640] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1042.491896] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1042.492120] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1042.492337] env[62346]: DEBUG nova.virt.hardware [None 
req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1042.492517] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1042.492685] env[62346]: DEBUG nova.virt.hardware [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.493590] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6783924e-90fe-43e9-9d5a-41753f476568 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.502031] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96531776-6885-4012-b184-4d06089e0c52 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.974417] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Successfully created port: 8629d8c2-394f-4273-a962-171b52644a96 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.220237] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.220237] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.220237] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.921838] env[62346]: DEBUG nova.compute.manager [req-52110f9c-0649-4395-88ec-ec84abb77d90 req-6f2cbba3-de0c-48ad-acae-d9c7b7c94c47 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Received event network-vif-plugged-8629d8c2-394f-4273-a962-171b52644a96 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1043.922126] env[62346]: DEBUG oslo_concurrency.lockutils [req-52110f9c-0649-4395-88ec-ec84abb77d90 req-6f2cbba3-de0c-48ad-acae-d9c7b7c94c47 service nova] Acquiring lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1043.922312] env[62346]: DEBUG oslo_concurrency.lockutils [req-52110f9c-0649-4395-88ec-ec84abb77d90 req-6f2cbba3-de0c-48ad-acae-d9c7b7c94c47 service nova] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.922453] env[62346]: DEBUG oslo_concurrency.lockutils [req-52110f9c-0649-4395-88ec-ec84abb77d90 req-6f2cbba3-de0c-48ad-acae-d9c7b7c94c47 service nova] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.922609] env[62346]: DEBUG nova.compute.manager [req-52110f9c-0649-4395-88ec-ec84abb77d90 req-6f2cbba3-de0c-48ad-acae-d9c7b7c94c47 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] No waiting events found dispatching network-vif-plugged-8629d8c2-394f-4273-a962-171b52644a96 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1043.922772] env[62346]: WARNING nova.compute.manager [req-52110f9c-0649-4395-88ec-ec84abb77d90 req-6f2cbba3-de0c-48ad-acae-d9c7b7c94c47 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Received unexpected event network-vif-plugged-8629d8c2-394f-4273-a962-171b52644a96 for instance with vm_state building and task_state spawning. [ 1043.995675] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Successfully updated port: 8629d8c2-394f-4273-a962-171b52644a96 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.007337] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "refresh_cache-c8d8504b-c17f-4b1e-9769-843e49df0ea4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.007512] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "refresh_cache-c8d8504b-c17f-4b1e-9769-843e49df0ea4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.007664] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1044.092027] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1044.370028] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Updating instance_info_cache with network_info: [{"id": "8629d8c2-394f-4273-a962-171b52644a96", "address": "fa:16:3e:a2:0a:ea", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8629d8c2-39", "ovs_interfaceid": "8629d8c2-394f-4273-a962-171b52644a96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.386063] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "refresh_cache-c8d8504b-c17f-4b1e-9769-843e49df0ea4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.386372] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Instance network_info: |[{"id": "8629d8c2-394f-4273-a962-171b52644a96", "address": "fa:16:3e:a2:0a:ea", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8629d8c2-39", "ovs_interfaceid": "8629d8c2-394f-4273-a962-171b52644a96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1044.386781] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:0a:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8629d8c2-394f-4273-a962-171b52644a96', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.395089] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating folder: Project (abc1ead3f9a9442ca0b85f152f94fe6b). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1044.396060] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c37351f-7f15-4f77-8727-336f83b35581 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.408352] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created folder: Project (abc1ead3f9a9442ca0b85f152f94fe6b) in parent group-v953204. [ 1044.408687] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating folder: Instances. Parent ref: group-v953266. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1044.409394] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6bdf128-f316-4b61-9c68-c3f9590c2c7f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.419488] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created folder: Instances in parent group-v953266. [ 1044.419886] env[62346]: DEBUG oslo.service.loopingcall [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.420166] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1044.420421] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbbbf8e4-8b50-4fac-adc3-befc2fa902d3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.440514] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.440514] env[62346]: value = "task-4891671" [ 1044.440514] env[62346]: _type = "Task" [ 1044.440514] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.450804] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891671, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.953605] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891671, 'name': CreateVM_Task, 'duration_secs': 0.295794} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.953930] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1044.954404] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.954647] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.954973] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.955254] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01365745-4518-48e6-80c7-372562311c42 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.960458] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1044.960458] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52973db2-7ddc-0df8-a36f-798213072c2e" [ 1044.960458] env[62346]: _type = "Task" [ 1044.960458] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.969271] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52973db2-7ddc-0df8-a36f-798213072c2e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.220386] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.220713] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.220879] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1045.471925] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.472219] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.472444] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.955062] env[62346]: DEBUG nova.compute.manager [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Received event network-changed-8629d8c2-394f-4273-a962-171b52644a96 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1045.955335] env[62346]: DEBUG nova.compute.manager [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Refreshing instance network info cache due to event network-changed-8629d8c2-394f-4273-a962-171b52644a96. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1045.955471] env[62346]: DEBUG oslo_concurrency.lockutils [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] Acquiring lock "refresh_cache-c8d8504b-c17f-4b1e-9769-843e49df0ea4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.955612] env[62346]: DEBUG oslo_concurrency.lockutils [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] Acquired lock "refresh_cache-c8d8504b-c17f-4b1e-9769-843e49df0ea4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.955771] env[62346]: DEBUG nova.network.neutron [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Refreshing network info cache for port 8629d8c2-394f-4273-a962-171b52644a96 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.515887] env[62346]: DEBUG nova.network.neutron [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Updated VIF entry in instance network info cache for port 8629d8c2-394f-4273-a962-171b52644a96. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1046.516995] env[62346]: DEBUG nova.network.neutron [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Updating instance_info_cache with network_info: [{"id": "8629d8c2-394f-4273-a962-171b52644a96", "address": "fa:16:3e:a2:0a:ea", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8629d8c2-39", "ovs_interfaceid": "8629d8c2-394f-4273-a962-171b52644a96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.530169] env[62346]: DEBUG oslo_concurrency.lockutils [req-f213803b-3ff5-4547-81a4-2d3a4f47b27e req-609078bc-6781-430c-98bb-87cb933d09a9 service nova] Releasing lock "refresh_cache-c8d8504b-c17f-4b1e-9769-843e49df0ea4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.763796] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] 
Acquiring lock "88727b37-0f05-4551-ac87-e43385e0f76d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.764102] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "88727b37-0f05-4551-ac87-e43385e0f76d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.952744] env[62346]: WARNING oslo_vmware.rw_handles [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1085.952744] env[62346]: ERROR oslo_vmware.rw_handles [ 1085.953367] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1085.955755] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1085.956079] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Copying Virtual Disk [datastore2] vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk 
to [datastore2] vmware_temp/6fef0a13-b232-4813-9209-f6b0992efcf3/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1085.956424] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a942e8d-b1fa-4434-9818-066109e32e7a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.966097] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Waiting for the task: (returnval){ [ 1085.966097] env[62346]: value = "task-4891672" [ 1085.966097] env[62346]: _type = "Task" [ 1085.966097] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.974381] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Task: {'id': task-4891672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.477304] env[62346]: DEBUG oslo_vmware.exceptions [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1086.477469] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.478166] env[62346]: ERROR nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1086.478166] env[62346]: Faults: ['InvalidArgument'] [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Traceback (most recent call last): [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] yield resources [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self.driver.spawn(context, instance, image_meta, [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self._fetch_image_if_missing(context, vi) [ 1086.478166] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] image_cache(vi, tmp_image_ds_loc) [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] vm_util.copy_virtual_disk( [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] session._wait_for_task(vmdk_copy_task) [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] return self.wait_for_task(task_ref) [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] return evt.wait() [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] result = hub.switch() [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1086.478610] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] return self.greenlet.switch() [ 1086.479063] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1086.479063] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self.f(*self.args, **self.kw) [ 1086.479063] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1086.479063] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] raise exceptions.translate_fault(task_info.error) [ 
1086.479063] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1086.479063] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Faults: ['InvalidArgument'] [ 1086.479063] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] [ 1086.480023] env[62346]: INFO nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Terminating instance [ 1086.481422] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.481852] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.482593] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1086.482891] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1086.486017] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cad2c9f-13d4-4840-aa08-b368a947a221 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.486017] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399ec974-e333-41fb-b327-f8d56c2ac6f9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.493497] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1086.493897] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b447978d-8dd3-4a9c-8e06-b9eb47243b5b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.496433] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1086.496721] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1086.497808] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49cd3243-bbd1-4548-a7a4-c66948877d95 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.503066] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Waiting for the task: (returnval){ [ 1086.503066] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]526c33e2-1085-e9de-5368-d18375981a33" [ 1086.503066] env[62346]: _type = "Task" [ 1086.503066] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.513051] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]526c33e2-1085-e9de-5368-d18375981a33, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.576352] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1086.576575] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1086.576755] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Deleting the datastore file [datastore2] 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.577094] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ca2e79d-55cb-4665-b11e-ccbbbf385d89 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.584661] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Waiting for the task: (returnval){ [ 1086.584661] env[62346]: value = "task-4891674" [ 1086.584661] env[62346]: _type = "Task" [ 1086.584661] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.593012] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Task: {'id': task-4891674, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.014289] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1087.014289] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Creating directory with path [datastore2] vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.014517] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ed19990-039d-4a11-a508-f95b42a71ad4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.028357] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Created directory with path [datastore2] vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.028580] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Fetch image to [datastore2] vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1087.028763] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1087.029597] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f029cb-d0af-4636-acd4-fe6a375d9d02 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.037183] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593f5d27-9ada-4f64-a085-1dcd5a730b8a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.046718] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3decf13-a8cd-4924-8758-478b5432a69e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.077858] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b638300-d749-4ea8-9f28-7fd216dd289b {{(pid=62346) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.084657] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-39c9922a-7b22-45ee-8e54-945095c341b1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.094574] env[62346]: DEBUG oslo_vmware.api [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Task: {'id': task-4891674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068331} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.094842] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.095030] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1087.095239] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1087.095419] env[62346]: INFO nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Took 0.61 seconds to destroy the instance on the hypervisor. 
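The CreateVM_Task, SearchDatastore_Task and DeleteDatastoreFile_Task entries above all follow the same wait_for_task/_poll_task cycle: the client submits a task, then polls its state until it either completes ("progress is 0%." ... "completed successfully.") or fails. A minimal sketch of that polling loop, assuming a hypothetical session object with a get_task_info() call rather than the real oslo.vmware API:

# Hypothetical sketch of the poll-until-complete cycle seen in the
# wait_for_task/_poll_task entries above; get_task_info(), info.state
# and TaskFault are invented stand-ins, not the real oslo.vmware API.
import time

class TaskFault(Exception):
    """Raised when the backend reports a failed task."""

def wait_for_task(session, task_ref, interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = session.get_task_info(task_ref)   # hypothetical call
        if info.state == "success":
            return info.result                   # e.g. the created VM ref
        if info.state == "error":
            # mirrors "raise exceptions.translate_fault(task_info.error)"
            raise TaskFault(info.error)
        # corresponds to the "... progress is 0%." debug lines
        print(f"Task {task_ref}: progress is {info.progress}%.")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")

The 'duration_secs' value reported on completion (0.068331 for the delete task above) is simply the elapsed time between task submission and the poll that observes success.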
[ 1087.097630] env[62346]: DEBUG nova.compute.claims [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1087.097872] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.098123] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.113526] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1087.171694] env[62346]: DEBUG oslo_vmware.rw_handles [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1087.230069] env[62346]: DEBUG oslo_vmware.rw_handles [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1087.230274] env[62346]: DEBUG oslo_vmware.rw_handles [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1087.513010] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036763b6-d518-4f9f-b7b3-39ab9bbe7084 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.520951] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8dd132d-d6b6-4035-85de-a87bc814dc96 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.551899] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8fd72a-4e6c-4592-83f7-3dfe37aceb99 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.559709] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a52c52-d6f3-4029-a595-20aeb7bf6669 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.573435] env[62346]: DEBUG nova.compute.provider_tree [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.582826] env[62346]: DEBUG nova.scheduler.client.report [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1087.597901] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.500s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.598493] env[62346]: ERROR nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1087.598493] env[62346]: Faults: ['InvalidArgument'] [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Traceback (most recent call last): [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/compute/manager.py", line 
2633, in _build_and_run_instance [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self.driver.spawn(context, instance, image_meta, [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self._fetch_image_if_missing(context, vi) [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] image_cache(vi, tmp_image_ds_loc) [ 1087.598493] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] vm_util.copy_virtual_disk( [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] session._wait_for_task(vmdk_copy_task) [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] return self.wait_for_task(task_ref) [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] return evt.wait() [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] result = hub.switch() [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] return self.greenlet.switch() [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1087.598882] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] self.f(*self.args, **self.kw) [ 1087.599225] env[62346]: ERROR nova.compute.manager [instance: 
7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1087.599225] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] raise exceptions.translate_fault(task_info.error) [ 1087.599225] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1087.599225] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Faults: ['InvalidArgument'] [ 1087.599225] env[62346]: ERROR nova.compute.manager [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] [ 1087.599467] env[62346]: DEBUG nova.compute.utils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1087.601157] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Build of instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd was re-scheduled: A specified parameter was not correct: fileType [ 1087.601157] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1087.601506] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1087.601682] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1087.601904] env[62346]: DEBUG nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1087.602104] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1088.160482] env[62346]: DEBUG nova.network.neutron [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.177980] env[62346]: INFO nova.compute.manager [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Took 0.58 seconds to deallocate network for instance. [ 1088.297795] env[62346]: INFO nova.scheduler.client.report [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Deleted allocations for instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd [ 1088.322053] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8b0f2a78-4dad-4a57-bc48-b2cdbbeb8bce tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 519.898s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.322053] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 319.906s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.322053] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Acquiring lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.322306] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] 
Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.322306] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.324685] env[62346]: INFO nova.compute.manager [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Terminating instance [ 1088.326646] env[62346]: DEBUG nova.compute.manager [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1088.326996] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1088.327652] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed4833ba-2c76-48fa-b751-697e9196f7d0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.338995] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc1b916-d76f-423a-bba0-169560a7b642 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.351432] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1088.378433] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd could not be found. 
[ 1088.378681] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1088.379062] env[62346]: INFO nova.compute.manager [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1088.379353] env[62346]: DEBUG oslo.service.loopingcall [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.379640] env[62346]: DEBUG nova.compute.manager [-] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1088.379741] env[62346]: DEBUG nova.network.neutron [-] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1088.407042] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.407339] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.408938] env[62346]: INFO nova.compute.claims [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1088.411972] env[62346]: DEBUG nova.network.neutron [-] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.423731] env[62346]: INFO nova.compute.manager [-] [instance: 7c5cd006-1ac3-430f-aeb5-c58e3a9223bd] Took 0.04 seconds to deallocate network for instance. 
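The terminate path above is deliberately idempotent: the build had already failed and been rescheduled away, so destroy finds no VM on the backend, logs the InstanceNotFound only as a warning, and still reports "Instance destroyed" before moving on to network deallocation. A toy version of that treat-missing-as-deleted pattern (all names below are hypothetical, not Nova's code):

    class InstanceNotFound(Exception):
        pass

    class FakeBackend:
        """Minimal in-memory stand-in for a hypervisor backend."""
        def __init__(self, vms=()):
            self._vms = set(vms)

        def find_vm(self, instance_id):
            if instance_id not in self._vms:
                raise InstanceNotFound(instance_id)
            return instance_id

        def delete_vm(self, vm_ref):
            self._vms.discard(vm_ref)

    def destroy_instance(backend, instance_id):
        # Idempotent delete: a VM that never got created (failed build)
        # or is already gone counts as successfully destroyed.
        try:
            backend.delete_vm(backend.find_vm(instance_id))
        except InstanceNotFound:
            print(f"Instance does not exist on backend: {instance_id}")
        print(f"[instance: {instance_id}] Instance destroyed")

    destroy_instance(FakeBackend(), "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd")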
[ 1088.529529] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c02b72d1-93b5-4db1-bd8a-c8252b24295f tempest-ImagesOneServerNegativeTestJSON-299772790 tempest-ImagesOneServerNegativeTestJSON-299772790-project-member] Lock "7c5cd006-1ac3-430f-aeb5-c58e3a9223bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.820469] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3660f60c-3206-438e-9b52-8f8b7dbb4b93 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.829028] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00807bd4-0fa7-4bf8-999e-d6b361254380 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.862747] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd68dd3-0232-4449-8448-e16fbf4c2114 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.872019] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb631f5d-b5f2-4b7c-8a76-9fe817dbed7c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.890017] env[62346]: DEBUG nova.compute.provider_tree [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.901271] env[62346]: DEBUG nova.scheduler.client.report [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1088.920341] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.513s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.920903] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Start building networks 
asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1088.957080] env[62346]: DEBUG nova.compute.utils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1088.959410] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1088.959772] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1088.970110] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1089.048426] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1089.063936] env[62346]: DEBUG nova.policy [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9bca6efe05bf4ad3908400ced0540799', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '114b0cf5aadd493e9496f1c436aede91', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1089.078972] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1089.079273] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1089.079430] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.079608] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1089.079751] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.079896] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1089.080121] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1089.080284] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1089.080446] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1089.080605] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1089.080811] env[62346]: DEBUG nova.virt.hardware [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1089.081790] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450326fb-021a-4fc1-958a-ce8609776415 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.091041] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20f6028-fd0f-462f-aaab-77f585d4666f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.544021] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Successfully created port: b22a7c31-6c02-4dd2-a7fb-072e8d3be348 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1090.339602] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Successfully updated port: b22a7c31-6c02-4dd2-a7fb-072e8d3be348 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1090.353857] env[62346]: DEBUG 
oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "refresh_cache-1c3a2024-f6c0-4f6f-86a6-af5debee0479" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.354067] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquired lock "refresh_cache-1c3a2024-f6c0-4f6f-86a6-af5debee0479" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.355329] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1090.358175] env[62346]: DEBUG nova.compute.manager [req-42d1225f-7347-4d83-837b-f3161773c6c1 req-20ab69cc-cb36-49dd-844c-233206d4e2c4 service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Received event network-vif-plugged-b22a7c31-6c02-4dd2-a7fb-072e8d3be348 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1090.358404] env[62346]: DEBUG oslo_concurrency.lockutils [req-42d1225f-7347-4d83-837b-f3161773c6c1 req-20ab69cc-cb36-49dd-844c-233206d4e2c4 service nova] Acquiring lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.358598] env[62346]: DEBUG oslo_concurrency.lockutils [req-42d1225f-7347-4d83-837b-f3161773c6c1 req-20ab69cc-cb36-49dd-844c-233206d4e2c4 service nova] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.358761] env[62346]: DEBUG oslo_concurrency.lockutils [req-42d1225f-7347-4d83-837b-f3161773c6c1 req-20ab69cc-cb36-49dd-844c-233206d4e2c4 service nova] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.358923] env[62346]: DEBUG nova.compute.manager [req-42d1225f-7347-4d83-837b-f3161773c6c1 req-20ab69cc-cb36-49dd-844c-233206d4e2c4 service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] No waiting events found dispatching network-vif-plugged-b22a7c31-6c02-4dd2-a7fb-072e8d3be348 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1090.359149] env[62346]: WARNING nova.compute.manager [req-42d1225f-7347-4d83-837b-f3161773c6c1 req-20ab69cc-cb36-49dd-844c-233206d4e2c4 service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Received unexpected event network-vif-plugged-b22a7c31-6c02-4dd2-a7fb-072e8d3be348 for instance with vm_state building and task_state spawning. 
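The "unexpected event" warning above is a benign race: Neutron delivered network-vif-plugged-b22a7c31-6c02-4dd2-a7fb-072e8d3be348 before the compute manager had registered a waiter for it, so pop_instance_event found nothing to wake and the event was dropped while the instance carried on building. A toy register-then-dispatch handshake in the same spirit (hypothetical names, a sketch rather than Nova's implementation):

    import threading

    _waiters = {}   # (instance_id, event_name) -> threading.Event
    _guard = threading.Lock()

    def prepare_for_event(instance_id, event_name):
        # The spawning thread registers interest *before* triggering the
        # action that will produce the event, then waits on the Event.
        ev = threading.Event()
        with _guard:
            _waiters[(instance_id, event_name)] = ev
        return ev

    def dispatch_event(instance_id, event_name):
        with _guard:
            ev = _waiters.pop((instance_id, event_name), None)
        if ev is None:
            # No one registered yet: the "unexpected event" case above.
            print(f"Received unexpected event {event_name} "
                  f"for instance {instance_id}")
        else:
            ev.set()  # wake the waiter

    # An event arriving before anyone waits is logged and dropped:
    dispatch_event("1c3a2024-f6c0-4f6f-86a6-af5debee0479",
                   "network-vif-plugged")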
[ 1090.427341] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1090.715713] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Updating instance_info_cache with network_info: [{"id": "b22a7c31-6c02-4dd2-a7fb-072e8d3be348", "address": "fa:16:3e:cc:5a:92", "network": {"id": "190844d3-5cb3-43b4-9549-b3020c5a5f4e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1581989393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "114b0cf5aadd493e9496f1c436aede91", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb22a7c31-6c", "ovs_interfaceid": "b22a7c31-6c02-4dd2-a7fb-072e8d3be348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.732658] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Releasing lock "refresh_cache-1c3a2024-f6c0-4f6f-86a6-af5debee0479" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.732963] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Instance network_info: |[{"id": "b22a7c31-6c02-4dd2-a7fb-072e8d3be348", "address": "fa:16:3e:cc:5a:92", "network": {"id": "190844d3-5cb3-43b4-9549-b3020c5a5f4e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1581989393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "114b0cf5aadd493e9496f1c436aede91", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb22a7c31-6c", "ovs_interfaceid": "b22a7c31-6c02-4dd2-a7fb-072e8d3be348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.733426] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:5a:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b22a7c31-6c02-4dd2-a7fb-072e8d3be348', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.740738] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Creating folder: Project (114b0cf5aadd493e9496f1c436aede91). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1090.741373] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4039c01-763a-4f02-b4be-e813e1f04409 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.752031] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Created folder: Project (114b0cf5aadd493e9496f1c436aede91) in parent group-v953204. [ 1090.752293] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Creating folder: Instances. Parent ref: group-v953269. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1090.752471] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4b98c6c-363d-4981-850b-a892c53c1d54 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.762232] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Created folder: Instances in parent group-v953269. [ 1090.762473] env[62346]: DEBUG oslo.service.loopingcall [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.762663] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1090.762920] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09a16b3a-b6e3-46b5-a0f5-7fc9258636ab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.782178] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.782178] env[62346]: value = "task-4891677" [ 1090.782178] env[62346]: _type = "Task" [ 1090.782178] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.789623] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891677, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.294322] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891677, 'name': CreateVM_Task, 'duration_secs': 0.297904} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.294679] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1091.295681] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.295980] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.296517] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1091.296854] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f31c3a1-261e-4ee8-a13a-b8797954a442 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.303810] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Waiting for the task: (returnval){ [ 1091.303810] env[62346]: value = 
"session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5291cf02-bf72-c0b1-ef81-34677e79e074" [ 1091.303810] env[62346]: _type = "Task" [ 1091.303810] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.312718] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5291cf02-bf72-c0b1-ef81-34677e79e074, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.814926] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.815246] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.815435] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.498220] env[62346]: DEBUG nova.compute.manager [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Received event network-changed-b22a7c31-6c02-4dd2-a7fb-072e8d3be348 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1092.498417] env[62346]: DEBUG nova.compute.manager [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Refreshing instance network info cache due to event network-changed-b22a7c31-6c02-4dd2-a7fb-072e8d3be348. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1092.498814] env[62346]: DEBUG oslo_concurrency.lockutils [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] Acquiring lock "refresh_cache-1c3a2024-f6c0-4f6f-86a6-af5debee0479" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.498814] env[62346]: DEBUG oslo_concurrency.lockutils [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] Acquired lock "refresh_cache-1c3a2024-f6c0-4f6f-86a6-af5debee0479" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.499172] env[62346]: DEBUG nova.network.neutron [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Refreshing network info cache for port b22a7c31-6c02-4dd2-a7fb-072e8d3be348 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1093.126018] env[62346]: DEBUG nova.network.neutron [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Updated VIF entry in instance network info cache for port b22a7c31-6c02-4dd2-a7fb-072e8d3be348. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1093.126397] env[62346]: DEBUG nova.network.neutron [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Updating instance_info_cache with network_info: [{"id": "b22a7c31-6c02-4dd2-a7fb-072e8d3be348", "address": "fa:16:3e:cc:5a:92", "network": {"id": "190844d3-5cb3-43b4-9549-b3020c5a5f4e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1581989393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "114b0cf5aadd493e9496f1c436aede91", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb22a7c31-6c", "ovs_interfaceid": "b22a7c31-6c02-4dd2-a7fb-072e8d3be348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.136395] env[62346]: DEBUG oslo_concurrency.lockutils [req-625000f5-5fae-470b-a33f-e71a454af1d7 req-8c371c91-594d-4dd5-8296-bacd59c16c3f service nova] Releasing lock "refresh_cache-1c3a2024-f6c0-4f6f-86a6-af5debee0479" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.220481] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.232795] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.233757] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.234114] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.234417] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1097.236246] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffba106-57bd-42d8-9b27-f3a55ed81baa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.246595] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d302c78-03f0-4d1a-ad2b-1c13b5337e27 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.269532] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28529a40-41af-402e-ac3f-19fc338d9ce9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.277569] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd43a6eb-b79e-4399-8cd2-33fc766b5304 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.318493] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180544MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1097.318493] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.318493] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.416745] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29eba656-6430-4009-8d24-c5a6f33bef95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.416745] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417241] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417241] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417241] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417241] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417447] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417447] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417534] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.417924] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1097.436416] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.451214] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 723d75ac-364c-4e21-a664-879e99839c16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.472391] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 67812f8f-9aee-42ac-b22e-5761240cbb7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.489037] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29516cd3-7d37-40d0-9f8a-9215ae69938b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.501012] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.512491] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.524457] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.536168] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0da3e07d-07a7-4c1a-a3aa-ae4973311d80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.547437] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance be701e07-33b0-48de-962b-5051d1c2e2ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.561490] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.575171] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0e30d5a5-0c28-411a-b0fd-8385d86323c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.586794] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 793e37e0-7e21-49f9-aaf2-44a42af1f4b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.598121] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 55fe8916-2b2f-4912-ae6b-4a06db60fedc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.609271] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1097.609415] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1097.609615] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '45', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_cfcd954a9e4d47cc893007bfd1c87a93': '1', 'io_workload': '10', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1', 'num_proj_da4db2adf7dd413caf5d93fedf5af2dc': '1', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1', 'num_proj_04179eaf0efd4e43a9d4eb1445ffc270': '1', 'num_proj_a625cf2514b140fcb029253fd85f6ee8': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_task_spawning': '2', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1097.911905] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.097094] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3768c7c8-9a8c-47bd-a0a8-a0b99a3c4c48 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.105978] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73e6714-a4b5-400f-abb5-993c41cbd383 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.142453] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da31520d-63b5-44d0-813c-01ab7a64180d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.151933] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b00deb7-5b19-4b8b-b298-569c1948bfa7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.165202] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.174174] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1098.189804] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1098.190013] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.872s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.218265] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.218583] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.220053] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.220314] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances {{(pid=62346) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11307}} [ 1101.231757] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] There are 0 instances to clean {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1102.231960] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.232285] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1102.233022] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1102.255487] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.255653] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.255786] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.255930] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.256374] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.256374] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.256374] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.256374] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Skipping network cache update for instance because it is Building. 
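
Most of the surrounding lines ("Running periodic task ComputeManager._...") are emitted by oslo.service's periodic task machinery: methods decorated as periodic tasks are collected on a PeriodicTasks subclass and driven from the service loop. A minimal sketch of that pattern, assuming oslo.service and oslo.config are installed; the spacing values and method bodies are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class MiniManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Nova's version rebuilds the list of instances on this host and
            # refreshes each network info cache, skipping instances that are
            # still Building, exactly as in the lines above.
            print("healing instance info cache")

        @periodic_task.periodic_task(spacing=60)
        def _poll_rebooting_instances(self, context):
            print("polling rebooting instances")

    mgr = MiniManager()
    # The service loop calls this repeatedly; it runs whatever is due and
    # returns the number of seconds until the next task becomes due.
    idle = mgr.run_periodic_tasks(context=None)
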
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.256538] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.256600] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1102.256714] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1102.257324] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.220643] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.221403] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.220364] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.220790] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.221017] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances with incomplete migration {{(pid=62346) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1105.227171] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.250685] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.220637] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62346) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.228726] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.229151] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.229151] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1131.634627] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_power_states {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.655293] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Getting list of instances from cluster (obj){ [ 1131.655293] env[62346]: value = "domain-c8" [ 1131.655293] env[62346]: _type = "ClusterComputeResource" [ 1131.655293] env[62346]: } {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1131.656917] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fef15a-408a-4b07-8b0d-517a8fd98f4f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.675076] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Got total of 10 instances {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1131.675273] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 29eba656-6430-4009-8d24-c5a6f33bef95 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.675552] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 8e698dc0-2883-4987-8baa-f5b6b43fff06 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.675727] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 88293424-7e33-4c64-ac8a-fc1f5494f01d {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.675878] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 3bca1346-07e6-4514-8ea0-5783b9640849 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.676038] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid c72a59f9-220d-4da4-8daa-2724ab255190 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.676191] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 
af33f439-7ebe-478a-83ee-f7fc8e7b630d {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.676336] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 6155a6c2-3d55-4fe6-bade-a97db98796a0 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.676492] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 4d8eeb53-06e4-423f-8719-10f5283175b4 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.676630] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid c8d8504b-c17f-4b1e-9769-843e49df0ea4 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.676771] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 1c3a2024-f6c0-4f6f-86a6-af5debee0479 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1131.677104] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "29eba656-6430-4009-8d24-c5a6f33bef95" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.677336] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.677553] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.677767] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "3bca1346-07e6-4514-8ea0-5783b9640849" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.677966] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "c72a59f9-220d-4da4-8daa-2724ab255190" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.678180] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.678399] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock 
"6155a6c2-3d55-4fe6-bade-a97db98796a0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.678618] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "4d8eeb53-06e4-423f-8719-10f5283175b4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.678827] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.679043] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.815058] env[62346]: WARNING oslo_vmware.rw_handles [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1135.815058] env[62346]: ERROR oslo_vmware.rw_handles [ 1135.815651] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1135.817634] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f 
tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1135.817923] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Copying Virtual Disk [datastore2] vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/bedc1f57-1ffe-4eab-8e66-0de532bad910/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1135.818217] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eac9d3c4-9c3b-4d42-8829-89f56cbc9e59 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.826214] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Waiting for the task: (returnval){ [ 1135.826214] env[62346]: value = "task-4891678" [ 1135.826214] env[62346]: _type = "Task" [ 1135.826214] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.834547] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Task: {'id': task-4891678, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.336771] env[62346]: DEBUG oslo_vmware.exceptions [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Fault InvalidArgument not matched. 
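
"Waiting for the task ... progress is 0%" and "Fault InvalidArgument not matched." above come from oslo.vmware's task poller: it re-reads the vSphere TaskInfo until the state is success or error, then maps the fault name to a specific exception class, falling back to the generic VimFaultException when nothing matches. A dependency-free sketch of that loop, with a fake session standing in for the real API:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll a vSphere task reference until it reaches a terminal state."""
        while True:
            info = session.get_task_info(task_ref)  # TaskInfo snapshot
            if info["state"] == "running":
                print(f"Task {task_ref}: progress is {info.get('progress', 0)}%")
            elif info["state"] == "success":
                return info.get("result")
            elif info["state"] == "error":
                # oslo.vmware translates the fault name here; "Fault
                # InvalidArgument not matched." means it fell back to the
                # generic VimFaultException.
                raise TaskFailed(info["error"])
            time.sleep(interval)

    class FakeSession:
        def __init__(self, states):
            self._states = iter(states)
        def get_task_info(self, task_ref):
            return next(self._states)

    s = FakeSession([{"state": "running", "progress": 0},
                     {"state": "success", "result": "ok"}])
    print(wait_for_task(s, "task-4891678", interval=0))  # -> ok
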
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1136.337087] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.337651] env[62346]: ERROR nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.337651] env[62346]: Faults: ['InvalidArgument'] [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Traceback (most recent call last): [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] yield resources [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self.driver.spawn(context, instance, image_meta, [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self._fetch_image_if_missing(context, vi) [ 1136.337651] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] image_cache(vi, tmp_image_ds_loc) [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] vm_util.copy_virtual_disk( [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] session._wait_for_task(vmdk_copy_task) [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] return self.wait_for_task(task_ref) [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] return evt.wait() [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] result = hub.switch() [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1136.338072] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] return self.greenlet.switch() [ 1136.338465] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1136.338465] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self.f(*self.args, **self.kw) [ 1136.338465] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1136.338465] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] raise exceptions.translate_fault(task_info.error) [ 1136.338465] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.338465] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Faults: ['InvalidArgument'] [ 1136.338465] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] [ 1136.338465] env[62346]: INFO nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Terminating instance [ 1136.339584] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.339788] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.340035] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a98addb0-edd7-419e-8685-0416abe69a5d {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.342363] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1136.342526] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1136.343317] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1017e7cf-5e5b-4da0-9b17-e806f4d4e202 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.350833] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1136.351253] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95406c61-a0a4-4bb8-bb4a-13ead883f67f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.353747] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.353930] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1136.354931] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-566da470-7cb0-4c83-8bf5-a5553bd9764a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.360022] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Waiting for the task: (returnval){ [ 1136.360022] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ed4662-c457-4d5d-7a70-4bd8c163047d" [ 1136.360022] env[62346]: _type = "Task" [ 1136.360022] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.368149] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ed4662-c457-4d5d-7a70-4bd8c163047d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.428645] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1136.428877] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1136.429075] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Deleting the datastore file [datastore2] 29eba656-6430-4009-8d24-c5a6f33bef95 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.429389] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4109adba-c3fa-4494-a0b2-3808b82995f7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.437497] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Waiting for the task: (returnval){ [ 1136.437497] env[62346]: value = "task-4891680" [ 1136.437497] env[62346]: _type = "Task" [ 1136.437497] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.446892] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Task: {'id': task-4891680, 'name': DeleteDatastoreFile_Task} progress is 0%. 
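
The destroy path in the lines above follows a fixed order: unregister the VM, delete its directory on the datastore, and wait for the DeleteDatastoreFile_Task to complete before reporting the instance destroyed. A sketch of that sequence; the client object and method names are stand-ins, not the vSphere SDK:

    def destroy_instance(vim, vm_ref, ds_dir):
        """Teardown in the same order as the log; `vim` is a stand-in client."""
        vim.unregister_vm(vm_ref)                 # "Unregistering the VM"
        task = vim.delete_datastore_file(ds_dir)  # FileManager.DeleteDatastoreFile_Task
        vim.wait_for_task(task)                   # "Task: {'id': task-4891680, ...}"
        # Only after the delete task succeeds does the manager log
        # "Deleted contents of the VM from datastore" / "Instance destroyed".

    class FakeVim:
        def unregister_vm(self, ref):
            print("unregistered", ref)
        def delete_datastore_file(self, path):
            print("deleting", path)
            return "task-4891680"
        def wait_for_task(self, task):
            print("waited for", task)

    destroy_instance(FakeVim(), "vm-29eba656",
                     "[datastore2] 29eba656-6430-4009-8d24-c5a6f33bef95")
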
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.871082] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1136.871391] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Creating directory with path [datastore2] vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.871634] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c86026ff-b23f-44dd-9750-b08446aa7ba7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.885345] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Created directory with path [datastore2] vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.885554] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Fetch image to [datastore2] vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1136.885725] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1136.886522] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c37ef5-8cd4-4a92-98e1-499d165d9016 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.894149] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3d6a1f-3247-4bd6-bce4-f9b0d553f3b3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.903657] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef27de5-128f-4a32-bd57-0d126db9e25b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.934322] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-96086847-ae84-434a-b501-c1964554ff1e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.943282] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-be14c5e6-5056-4747-b823-0e7d8d392603 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.947731] env[62346]: DEBUG oslo_vmware.api [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Task: {'id': task-4891680, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079893} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.948338] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.948528] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1136.948705] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1136.948878] env[62346]: INFO nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Took 0.61 seconds to destroy the instance on the hypervisor. 
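
"Preparing fetch location" / "Fetch image to ..." implement a datastore-side image cache: the image is downloaded once into a vmware_temp staging directory, then copied into devstack-image-cache_base, serialized by the per-image lock seen elsewhere in this log. A filesystem-flavoured sketch of the same fetch-if-missing idea; paths and names are illustrative:

    import os
    import shutil
    import tempfile

    def fetch_image_if_missing(image_id, download, cache_dir):
        cached = os.path.join(cache_dir, image_id, image_id + ".vmdk")
        if os.path.exists(cached):
            return cached                       # cache hit: nothing to fetch
        tmp = tempfile.mkdtemp(prefix="vmware_temp-")
        sparse = os.path.join(tmp, "tmp-sparse.vmdk")
        download(image_id, sparse)              # "Downloading image file data ..."
        os.makedirs(os.path.dirname(cached), exist_ok=True)
        shutil.copy(sparse, cached)             # the CopyVirtualDisk / "Caching image" step
        shutil.rmtree(tmp)
        return cached

    def fake_download(image_id, dest):
        with open(dest, "wb") as f:
            f.write(b"fake sparse vmdk bytes")

    print(fetch_image_if_missing("9feb52a6-5366-4257-bc23-471887ce1370",
                                 fake_download,
                                 tempfile.mkdtemp(prefix="image-cache-")))
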
[ 1136.951100] env[62346]: DEBUG nova.compute.claims [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1136.951280] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.951533] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.968456] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1137.024922] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1137.084136] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1137.084332] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
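
"Creating HTTP connection to write to file ..." and "Closing write handle" describe streaming the image to the ESX host's /folder datastore URL over HTTPS. A standard-library sketch of that pattern, not oslo.vmware's actual rw_handles code; note that the RemoteDisconnected traceback earlier in this log fired at exactly the close step, where the handle reads the server's response after sending all data:

    import http.client

    def upload_to_datastore(host, path, chunks, size):
        """Stream `size` bytes of image data to an ESX /folder URL."""
        conn = http.client.HTTPSConnection(host, 443)
        conn.putrequest("PUT", path)
        conn.putheader("Content-Length", str(size))
        conn.endheaders()
        for chunk in chunks:    # "Completed reading data from the image iterator."
            conn.send(chunk)
        # The RemoteDisconnected traceback fired here: closing the handle
        # reads the server's response, and the remote end had already
        # dropped the connection.
        resp = conn.getresponse()
        conn.close()
        return resp.status
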
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1137.378038] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967976bb-1d89-4fd8-ab89-1f788851b063 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1137.386086] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0f387e-1a7f-485d-8979-ca00b9b04060 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1137.416643] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f291ba4-5e3f-403d-958a-330b2eb7599f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1137.424099] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6467c776-a67f-457a-b3d8-d41e5b28362b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1137.437432] env[62346]: DEBUG nova.compute.provider_tree [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1137.446817] env[62346]: DEBUG nova.scheduler.client.report [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1137.461976] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.510s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1137.462564] env[62346]: ERROR nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1137.462564] env[62346]: Faults: ['InvalidArgument']
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Traceback (most recent call last):
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self.driver.spawn(context, instance, image_meta,
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self._fetch_image_if_missing(context, vi)
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] image_cache(vi, tmp_image_ds_loc)
[ 1137.462564] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] vm_util.copy_virtual_disk(
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] session._wait_for_task(vmdk_copy_task)
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] return self.wait_for_task(task_ref)
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] return evt.wait()
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] result = hub.switch()
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] return self.greenlet.switch()
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1137.462971] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] self.f(*self.args, **self.kw)
[ 1137.463362] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1137.463362] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] raise exceptions.translate_fault(task_info.error)
[ 1137.463362] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1137.463362] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Faults: ['InvalidArgument']
[ 1137.463362] env[62346]: ERROR nova.compute.manager [instance: 29eba656-6430-4009-8d24-c5a6f33bef95]
[ 1137.463362] env[62346]: DEBUG nova.compute.utils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1137.464722] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Build of instance 29eba656-6430-4009-8d24-c5a6f33bef95 was re-scheduled: A specified parameter was not correct: fileType
[ 1137.464722] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1137.465090] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1137.465287] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1137.465461] env[62346]: DEBUG nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1137.465626] env[62346]: DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1137.898030] env[62346]: DEBUG nova.network.neutron [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1137.910142] env[62346]: INFO nova.compute.manager [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Took 0.44 seconds to deallocate network for instance.
[ 1138.021968] env[62346]: INFO nova.scheduler.client.report [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Deleted allocations for instance 29eba656-6430-4009-8d24-c5a6f33bef95
[ 1138.047659] env[62346]: DEBUG oslo_concurrency.lockutils [None req-75e58e31-a8b3-4947-85d6-a19c276a1b4f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "29eba656-6430-4009-8d24-c5a6f33bef95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.027s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1138.048883] env[62346]: DEBUG oslo_concurrency.lockutils [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "29eba656-6430-4009-8d24-c5a6f33bef95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 368.866s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1138.049114] env[62346]: DEBUG oslo_concurrency.lockutils [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Acquiring lock "29eba656-6430-4009-8d24-c5a6f33bef95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1138.049319] env[62346]: DEBUG oslo_concurrency.lockutils [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "29eba656-6430-4009-8d24-c5a6f33bef95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1138.049488] env[62346]: DEBUG oslo_concurrency.lockutils [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "29eba656-6430-4009-8d24-c5a6f33bef95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1138.051761] env[62346]: INFO nova.compute.manager [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Terminating instance
[ 1138.055030] env[62346]: DEBUG nova.compute.manager [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1138.055030] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1138.055261] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce9f433c-ff9c-486e-a3ae-f40d775f4169 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1138.061055] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1138.067614] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b590918-b28f-4c25-b0d4-496fc3d1a641 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1138.103844] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29eba656-6430-4009-8d24-c5a6f33bef95 could not be found.
[ 1138.104105] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1138.104292] env[62346]: INFO nova.compute.manager [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1138.104530] env[62346]: DEBUG oslo.service.loopingcall [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1138.106974] env[62346]: DEBUG nova.compute.manager [-] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1138.107092] env[62346]: DEBUG nova.network.neutron [-] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1138.122514] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1138.122702] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1138.124704] env[62346]: INFO nova.compute.claims [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1138.143737] env[62346]: DEBUG nova.network.neutron [-] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1138.155129] env[62346]: INFO nova.compute.manager [-] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] Took 0.05 seconds to deallocate network for instance.
[ 1138.259834] env[62346]: DEBUG oslo_concurrency.lockutils [None req-de74cd2f-ff82-4077-88c0-13d27e45918f tempest-TenantUsagesTestJSON-1573802576 tempest-TenantUsagesTestJSON-1573802576-project-member] Lock "29eba656-6430-4009-8d24-c5a6f33bef95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1138.260208] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "29eba656-6430-4009-8d24-c5a6f33bef95" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.583s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1138.260424] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 29eba656-6430-4009-8d24-c5a6f33bef95] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1138.260658] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "29eba656-6430-4009-8d24-c5a6f33bef95" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1138.492167] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd528ec5-dcd5-4427-a079-90d7a29168c0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1138.500670] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b01aaa5-2ccd-45b7-a4d3-a8ca326f71f3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1138.530188] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253b4ccc-c63a-42f7-bb93-1b79358b60e7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1138.538393] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0731d3b8-d947-4615-9eb5-c1ae4112fcd7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1138.553464] env[62346]: DEBUG nova.compute.provider_tree [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1138.562423] env[62346]: DEBUG nova.scheduler.client.report [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1138.578773] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.456s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1138.579272] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1138.612188] env[62346]: DEBUG nova.compute.utils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1138.613935] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1138.613935] env[62346]: DEBUG nova.network.neutron [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1138.623018] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1138.698859] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1138.737871] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:34:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='3214ab9b-0cbb-4ed5-b564-c6dcf3589d19',id=37,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1015402308',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1138.738212] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1138.738409] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1138.738638] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1138.738819] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1138.739015] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1138.739291] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1138.739494] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1138.739712] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1138.739911] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1138.740158] env[62346]: DEBUG nova.virt.hardware [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1138.741102] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5c4352-4c68-4bbd-86f2-f31a3069d7c2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1138.746158] env[62346]: DEBUG nova.policy [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '900f3d2eeba94364a78d9453604afc95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d9801d7e83545239af34201cc557278', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1138.754270] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39e54aa-7924-411b-816e-ac869f34fae3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1139.457226] env[62346]: DEBUG nova.network.neutron [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Successfully created port: 4d502733-6733-4662-88fd-2133720d10da {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1140.214871] env[62346]: DEBUG nova.compute.manager [req-ea56f714-cb66-400d-84c6-e46a74c1d15c req-843d7033-14a6-4578-a483-979e84a25c05 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Received event network-vif-plugged-4d502733-6733-4662-88fd-2133720d10da {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1140.215122] env[62346]: DEBUG oslo_concurrency.lockutils [req-ea56f714-cb66-400d-84c6-e46a74c1d15c req-843d7033-14a6-4578-a483-979e84a25c05 service nova] Acquiring lock "045a7d28-8706-4818-be5f-20c03831686e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1140.215547] env[62346]: DEBUG oslo_concurrency.lockutils [req-ea56f714-cb66-400d-84c6-e46a74c1d15c req-843d7033-14a6-4578-a483-979e84a25c05 service nova] Lock "045a7d28-8706-4818-be5f-20c03831686e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1140.215785] env[62346]: DEBUG oslo_concurrency.lockutils [req-ea56f714-cb66-400d-84c6-e46a74c1d15c req-843d7033-14a6-4578-a483-979e84a25c05 service nova] Lock "045a7d28-8706-4818-be5f-20c03831686e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1140.216022] env[62346]: DEBUG nova.compute.manager [req-ea56f714-cb66-400d-84c6-e46a74c1d15c req-843d7033-14a6-4578-a483-979e84a25c05 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] No waiting events found dispatching network-vif-plugged-4d502733-6733-4662-88fd-2133720d10da {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1140.216337] env[62346]: WARNING nova.compute.manager [req-ea56f714-cb66-400d-84c6-e46a74c1d15c req-843d7033-14a6-4578-a483-979e84a25c05 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Received unexpected event network-vif-plugged-4d502733-6733-4662-88fd-2133720d10da for instance with vm_state building and task_state spawning.
[ 1140.294052] env[62346]: DEBUG nova.network.neutron [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Successfully updated port: 4d502733-6733-4662-88fd-2133720d10da {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1140.306946] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "refresh_cache-045a7d28-8706-4818-be5f-20c03831686e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1140.307123] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquired lock "refresh_cache-045a7d28-8706-4818-be5f-20c03831686e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1140.307275] env[62346]: DEBUG nova.network.neutron [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1140.354481] env[62346]: DEBUG nova.network.neutron [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1140.622962] env[62346]: DEBUG nova.network.neutron [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Updating instance_info_cache with network_info: [{"id": "4d502733-6733-4662-88fd-2133720d10da", "address": "fa:16:3e:ff:86:78", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.156", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d502733-67", "ovs_interfaceid": "4d502733-6733-4662-88fd-2133720d10da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1140.636843] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Releasing lock "refresh_cache-045a7d28-8706-4818-be5f-20c03831686e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1140.637105] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Instance network_info: |[{"id": "4d502733-6733-4662-88fd-2133720d10da", "address": "fa:16:3e:ff:86:78", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.156", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d502733-67", "ovs_interfaceid": "4d502733-6733-4662-88fd-2133720d10da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1140.637521] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:86:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '821e0800-fabf-48d0-87b4-db5a1eddce93', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d502733-6733-4662-88fd-2133720d10da', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1140.645760] env[62346]: DEBUG oslo.service.loopingcall [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1140.646425] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1140.646663] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcea17db-4645-4ba8-b75b-b55ed3cfac5b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1140.668366] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1140.668366] env[62346]: value = "task-4891681"
[ 1140.668366] env[62346]: _type = "Task"
[ 1140.668366] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1140.677203] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891681, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1141.181966] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891681, 'name': CreateVM_Task, 'duration_secs': 0.382101} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1141.182187] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1141.183111] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1141.183414] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1141.183873] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1141.184170] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cad044e-5dd3-46ae-b37a-0febe497be00 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1141.189453] env[62346]: DEBUG oslo_vmware.api [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for the task: (returnval){
[ 1141.189453] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529bf1d6-06cf-a919-ff2e-3937d55097ef"
[ 1141.189453] env[62346]: _type = "Task"
[ 1141.189453] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1141.199370] env[62346]: DEBUG oslo_vmware.api [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529bf1d6-06cf-a919-ff2e-3937d55097ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1141.700482] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1141.700750] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1141.700958] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1142.149392] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1142.296265] env[62346]: DEBUG nova.compute.manager [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Received event network-changed-4d502733-6733-4662-88fd-2133720d10da {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1142.296265] env[62346]: DEBUG nova.compute.manager [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Refreshing instance network info cache due to event network-changed-4d502733-6733-4662-88fd-2133720d10da. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}}
[ 1142.296265] env[62346]: DEBUG oslo_concurrency.lockutils [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] Acquiring lock "refresh_cache-045a7d28-8706-4818-be5f-20c03831686e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1142.296668] env[62346]: DEBUG oslo_concurrency.lockutils [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] Acquired lock "refresh_cache-045a7d28-8706-4818-be5f-20c03831686e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1142.296855] env[62346]: DEBUG nova.network.neutron [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Refreshing network info cache for port 4d502733-6733-4662-88fd-2133720d10da {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1142.497492] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "045a7d28-8706-4818-be5f-20c03831686e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1142.747229] env[62346]: DEBUG nova.network.neutron [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Updated VIF entry in instance network info cache for port 4d502733-6733-4662-88fd-2133720d10da. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1142.747586] env[62346]: DEBUG nova.network.neutron [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Updating instance_info_cache with network_info: [{"id": "4d502733-6733-4662-88fd-2133720d10da", "address": "fa:16:3e:ff:86:78", "network": {"id": "9ac825e2-5d1f-4eb7-9684-d50f09609c32", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.156", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "663b4a65cd3440018494db77614ee169", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "821e0800-fabf-48d0-87b4-db5a1eddce93", "external-id": "nsx-vlan-transportzone-97", "segmentation_id": 97, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d502733-67", "ovs_interfaceid": "4d502733-6733-4662-88fd-2133720d10da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1142.757779] env[62346]: DEBUG oslo_concurrency.lockutils [req-5804be0f-07af-4cfb-af19-1f6a7b168021 req-dd7b9f5a-ac69-442d-a72d-8b17c1c3c937 service nova] Releasing lock "refresh_cache-045a7d28-8706-4818-be5f-20c03831686e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1159.220687] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1159.235590] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1159.235833] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1159.236014] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1159.236181] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1159.237318] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3679aeee-c4e0-467c-84db-459cea9485e1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1159.246988] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0ff812-01b0-44e9-87ce-f898a2c0c6a4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1159.262476] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713c339c-54b2-4139-a313-8cec890ba647 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1159.269774] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc4d043-58d3-4902-aa0c-0c98f101e179 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1159.300261] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180582MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1159.300426] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1159.300628] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1159.502053] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.502234] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.502389] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.502569] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.502738] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.502876] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.502993] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.503130] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.503251] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.503366] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1159.515756] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 723d75ac-364c-4e21-a664-879e99839c16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.526607] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 67812f8f-9aee-42ac-b22e-5761240cbb7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.537158] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 29516cd3-7d37-40d0-9f8a-9215ae69938b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.547444] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.558275] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.569648] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.580804] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0da3e07d-07a7-4c1a-a3aa-ae4973311d80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.590366] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance be701e07-33b0-48de-962b-5051d1c2e2ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.600654] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.613830] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0e30d5a5-0c28-411a-b0fd-8385d86323c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.624828] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 793e37e0-7e21-49f9-aaf2-44a42af1f4b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.635119] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 55fe8916-2b2f-4912-ae6b-4a06db60fedc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.647553] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.658631] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1159.658877] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1159.659047] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '46', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_86b987e66ef94d7ab1a1bdb96ddc4a41': '1', 'io_workload': '10', 'num_proj_da4db2adf7dd413caf5d93fedf5af2dc': '1', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1', 'num_proj_04179eaf0efd4e43a9d4eb1445ffc270': '1', 'num_proj_a625cf2514b140fcb029253fd85f6ee8': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1159.676570] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing inventories for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}}
[ 1159.690916] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating ProviderTree inventory for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}}
[ 1159.691129] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating inventory in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1159.702174] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing aggregate associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, aggregates: None {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}}
[ 1159.728715] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing trait associations for resource provider
50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1160.016599] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1eef6d8-96a7-43f6-800e-45bee5652511 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.026120] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43aab98f-30e8-4cbd-a6e2-fd4e4a745fbc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.056527] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821e0809-c533-475b-bde1-a1b3b8287334 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.064503] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962922ee-8f91-41b1-9183-129dee66e781 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.078778] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.087441] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1160.103082] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1160.103311] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.803s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.103288] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.103595] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1164.103595] env[62346]: DEBUG nova.compute.manager 
[None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1164.126111] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.126289] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.126424] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.126551] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.126674] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.126823] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.126969] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.127112] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.127234] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.127355] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1164.127479] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
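The "Skipping network cache update for instance because it is Building" records show the info-cache heal task filtering out instances whose build is still in progress, then finding nothing left to refresh. A sketch of that filter, with illustrative field names:

    BUILDING = 'building'

    def instances_to_heal(instances):
        """Yield only instances whose network info cache is safe to rebuild."""
        for inst in instances:
            if inst.get('vm_state') == BUILDING:
                # an in-flight build manages its own network wiring;
                # refreshing the cache now would read half-built state
                continue
            yield inst

    # With every instance still building, this yields nothing, matching
    # "Didn't find any instances for network info cache update."
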
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1164.128039] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.220361] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.220599] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.220460] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.220460] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.220414] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.220606] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
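Each "Running periodic task ComputeManager._..." record above is emitted by oslo_service's periodic task machinery. A minimal, self-contained sketch of how such tasks are declared, and of the reclaim guard visible in the last record (the spacing value is illustrative):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # real task body elided

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            if CONF.reclaim_instance_interval <= 0:
                return  # "CONF.reclaim_instance_interval <= 0, skipping..."

    # The service loop then calls manager.run_periodic_tasks(context) on a
    # timer, producing one "Running periodic task" record per task.
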
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1169.221578] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.926540] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "21910ef4-a1af-4064-bf9e-350f78a938ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.926821] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.714755] env[62346]: WARNING oslo_vmware.rw_handles [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1185.714755] env[62346]: ERROR oslo_vmware.rw_handles [ 1185.714755] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1185.716727] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 
tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1185.717014] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Copying Virtual Disk [datastore2] vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/115040a0-2e60-4733-ae76-0630ce5984a7/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1185.717347] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed59f614-2acb-4b58-8459-9909724ed5e6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.726593] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Waiting for the task: (returnval){ [ 1185.726593] env[62346]: value = "task-4891682" [ 1185.726593] env[62346]: _type = "Task" [ 1185.726593] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.736265] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Task: {'id': task-4891682, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.238127] env[62346]: DEBUG oslo_vmware.exceptions [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Fault InvalidArgument not matched. 
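The "Waiting for the task: (returnval){ value = "task-4891682" ... }" block and the "progress is 0%" record are the client polling a vCenter task object until it settles. A generic, self-contained sketch of that loop (not oslo_vmware's actual internals):

    import time
    from dataclasses import dataclass
    from typing import Any, Callable, Optional

    @dataclass
    class TaskInfo:              # stand-in for a vim.TaskInfo snapshot
        state: str               # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        result: Any = None
        error: Optional[str] = None

    def wait_for_task(read_info: Callable[[], TaskInfo],
                      interval: float = 0.5) -> Any:
        """Poll until the task leaves its running states, the same shape
        as the wait_for_task/_poll_task pair in the log."""
        while True:
            info = read_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # the library first tries to map the fault to a typed
                # exception; "Fault InvalidArgument not matched" above means
                # no specific class existed, so a generic one is raised
                raise RuntimeError(info.error)
            time.sleep(interval)
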
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1186.238425] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.238984] env[62346]: ERROR nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1186.238984] env[62346]: Faults: ['InvalidArgument'] [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Traceback (most recent call last): [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] yield resources [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self.driver.spawn(context, instance, image_meta, [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self._fetch_image_if_missing(context, vi) [ 1186.238984] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] image_cache(vi, tmp_image_ds_loc) [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] vm_util.copy_virtual_disk( [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] session._wait_for_task(vmdk_copy_task) [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] return self.wait_for_task(task_ref) [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] return evt.wait() [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] result = hub.switch() [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1186.239500] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] return self.greenlet.switch() [ 1186.239969] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1186.239969] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self.f(*self.args, **self.kw) [ 1186.239969] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1186.239969] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] raise exceptions.translate_fault(task_info.error) [ 1186.239969] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1186.239969] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Faults: ['InvalidArgument'] [ 1186.239969] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] [ 1186.239969] env[62346]: INFO nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Terminating instance [ 1186.241215] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.241421] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.242081] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-974075b8-78d2-4648-b59d-73e838aa96d7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.243989] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1186.244236] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1186.245040] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928dad87-b2d4-48a9-8371-22b4f0a5a628 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.252378] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1186.252621] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a552a07-4eea-405a-adb0-005affecf962 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.255092] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.255270] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1186.256294] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c960320a-c65d-49c6-9542-8d6355262d63 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.262319] env[62346]: DEBUG oslo_vmware.api [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Waiting for the task: (returnval){ [ 1186.262319] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529ed228-e549-82eb-40ef-6b0704681949" [ 1186.262319] env[62346]: _type = "Task" [ 1186.262319] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.269941] env[62346]: DEBUG oslo_vmware.api [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529ed228-e549-82eb-40ef-6b0704681949, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.326149] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1186.327037] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1186.327037] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Deleting the datastore file [datastore2] 8e698dc0-2883-4987-8baa-f5b6b43fff06 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.327037] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5b9a010-a77e-4e19-834b-3a926f06b9f1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.334595] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Waiting for the task: (returnval){ [ 1186.334595] env[62346]: value = "task-4891684" [ 1186.334595] env[62346]: _type = "Task" [ 1186.334595] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.344037] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Task: {'id': task-4891684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.776069] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1186.776562] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Creating directory with path [datastore2] vmware_temp/b679b871-0d48-4476-ae0d-c03e8c8f7d1e/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.776562] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2689841-65d0-4200-aa1d-2fde1123e879 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.789774] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Created directory with path [datastore2] vmware_temp/b679b871-0d48-4476-ae0d-c03e8c8f7d1e/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.789933] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Fetch image to [datastore2] vmware_temp/b679b871-0d48-4476-ae0d-c03e8c8f7d1e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1186.790270] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/b679b871-0d48-4476-ae0d-c03e8c8f7d1e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1186.790976] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0aefc90-e4cf-4f06-9a55-ee17d692101e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.798872] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6bdcde-b7f6-4214-9d4c-d0a87574f9cb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.809481] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05232fc-ef3c-46c2-91e6-28bc65049124 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.844709] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f1effa7a-f7e0-444f-a3c1-fa4100535011 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.852932] env[62346]: DEBUG oslo_vmware.api [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Task: {'id': task-4891684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07826} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.854868] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.855396] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1186.855609] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1186.855826] env[62346]: INFO nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Took 0.61 seconds to destroy the instance on the hypervisor. 
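The teardown records above run in a fixed order: unregister the VM, delete its datastore contents, then report the instance destroyed with the elapsed time. A small sketch of that sequence with the two steps passed in as callables (names hypothetical):

    import time
    from typing import Callable

    def destroy_instance(unregister_vm: Callable[[], None],
                         delete_datastore_dir: Callable[[], None],
                         log: Callable[[str], None]) -> None:
        start = time.monotonic()
        unregister_vm()          # -> "Unregistered the VM"
        delete_datastore_dir()   # -> "Deleted contents of the VM from datastore"
        log(f"Took {time.monotonic() - start:.2f} seconds to destroy "
            "the instance on the hypervisor.")
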
[ 1186.860017] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d17a0bc0-afa4-4ea6-8c3e-1a88b763dd8e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.860017] env[62346]: DEBUG nova.compute.claims [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1186.860537] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.860683] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.887746] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1187.157609] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.160187] env[62346]: ERROR nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. 
[ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Traceback (most recent call last): [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] result = getattr(controller, method)(*args, **kwargs) [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._get(image_id) [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1187.160187] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] resp, body = self.http_client.get(url, headers=header) [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.request(url, 'GET', **kwargs) [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._handle_response(resp) [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise exc.from_response(resp, resp.content) [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] During handling of the above exception, another exception occurred: [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1187.160577] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Traceback (most recent call last): [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] yield resources [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self.driver.spawn(context, instance, image_meta, [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._fetch_image_if_missing(context, vi) [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] image_fetch(context, vi, tmp_image_ds_loc) [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] images.fetch_image( [ 1187.160921] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] metadata = IMAGE_API.get(context, image_ref) [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return session.show(context, image_id, [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] _reraise_translated_image_exception(image_id) [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise new_exc.with_traceback(exc_trace) [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] result = getattr(controller, method)(*args, **kwargs) [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1187.161311] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._get(image_id) [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] resp, body = self.http_client.get(url, headers=header) [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.request(url, 'GET', **kwargs) [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._handle_response(resp) [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise exc.from_response(resp, resp.content) [ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. 
[ 1187.161678] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1187.162045] env[62346]: INFO nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Terminating instance [ 1187.162690] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.162905] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.163442] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.163604] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquired lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.163756] env[62346]: DEBUG nova.network.neutron [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1187.166999] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc258b15-5358-4782-bf1f-cb6008eac238 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.176815] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.176815] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Folder [datastore2] devstack-image-cache_base created. 
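The Acquiring/Acquired/Releasing lock records that bracket the image-cache and refresh_cache work come from oslo_concurrency's lockutils. A minimal usage sketch of the two forms the log shows (lock names illustrative):

    from oslo_concurrency import lockutils

    # context-manager form, like the datastore image-cache lock:
    with lockutils.lock('devstack-image-cache_base/<image-id>.vmdk'):
        pass  # fetch or inspect the cached image while holding the lock

    # decorator form, like the "compute_resources" lock:
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        pass  # return claimed resources to the tracker under the lock
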
{{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1187.178081] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b83caa39-c551-4572-b945-a9c294d19b69 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.188620] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Waiting for the task: (returnval){ [ 1187.188620] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529bc651-0158-10fd-4037-fef82b73e977" [ 1187.188620] env[62346]: _type = "Task" [ 1187.188620] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.198573] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]529bc651-0158-10fd-4037-fef82b73e977, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.251545] env[62346]: DEBUG nova.network.neutron [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1187.271880] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad14442-febe-4ae9-a627-12d3a1783837 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.280647] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cc6783-647f-4b4f-a580-4ebef2db6ad6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.311588] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1cfb66-d937-49bf-96b1-764360c952fc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.319756] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f6d94b-ffcf-491c-8ab3-53bf5f9c5005 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.335465] env[62346]: DEBUG nova.compute.provider_tree [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.337459] env[62346]: DEBUG nova.network.neutron [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Updating instance_info_cache with network_info: [] {{(pid=62346) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.346122] env[62346]: DEBUG nova.scheduler.client.report [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1187.352022] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Releasing lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.352022] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1187.352022] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1187.352022] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe8966e-1592-4f9e-af9e-f9840062ed3d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.359996] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1187.360314] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-251fa83a-516b-4487-bc48-b27503f2b629 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.369085] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.508s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.369856] env[62346]: ERROR nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Failed 
to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1187.369856] env[62346]: Faults: ['InvalidArgument'] [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Traceback (most recent call last): [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self.driver.spawn(context, instance, image_meta, [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self._fetch_image_if_missing(context, vi) [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] image_cache(vi, tmp_image_ds_loc) [ 1187.369856] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] vm_util.copy_virtual_disk( [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] session._wait_for_task(vmdk_copy_task) [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] return self.wait_for_task(task_ref) [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] return evt.wait() [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] result = hub.switch() [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 
8e698dc0-2883-4987-8baa-f5b6b43fff06] return self.greenlet.switch() [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1187.370494] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] self.f(*self.args, **self.kw) [ 1187.371035] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1187.371035] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] raise exceptions.translate_fault(task_info.error) [ 1187.371035] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1187.371035] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Faults: ['InvalidArgument'] [ 1187.371035] env[62346]: ERROR nova.compute.manager [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] [ 1187.371035] env[62346]: DEBUG nova.compute.utils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1187.373500] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Build of instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 was re-scheduled: A specified parameter was not correct: fileType [ 1187.373500] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1187.373920] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1187.374125] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
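
The traceback above bottoms out in oslo_vmware.api._poll_task: the CopyVirtualDisk_Task reached an error state, and translate_fault turned the task's fault into a VimFaultException carrying the fault list ['InvalidArgument']. The following is a minimal, self-contained sketch of that poll-and-translate control flow; FakeTaskInfo and the exception class are local stand-ins for the vCenter task objects, not oslo.vmware's actual classes.

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FakeTaskInfo:
        """Hypothetical task-info object: state is 'running' | 'success' | 'error'."""
        def __init__(self, state, error=None):
            self.state = state
            self.error = error

    def wait_for_task(poll_fn, interval=0.5):
        """Poll poll_fn() until the task succeeds or errors out."""
        while True:
            info = poll_fn()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # the fault is translated into an exception here, as at api.py:448
                raise VimFaultException(['InvalidArgument'], info.error)
            time.sleep(interval)

    # A task that fails on the second poll, like the CopyVirtualDisk_Task above.
    states = iter([FakeTaskInfo('running'),
                   FakeTaskInfo('error', 'A specified parameter was not correct: fileType')])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc.fault_list, '-', exc)

Polling with a looping call rather than blocking on the SOAP request is what lets the eventlet hub switch greenthreads between polls, which is why the trace above passes through event.wait() and hub.switch().
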
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1187.374320] env[62346]: DEBUG nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1187.374515] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1187.396732] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1187.399592] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1187.399592] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Deleting the datastore file [datastore2] 88293424-7e33-4c64-ac8a-fc1f5494f01d {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.399592] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed80ef62-91f6-4197-a528-2701814ebc79 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.405739] env[62346]: DEBUG oslo_vmware.api [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Waiting for the task: (returnval){ [ 1187.405739] env[62346]: value = "task-4891686" [ 1187.405739] env[62346]: _type = "Task" [ 1187.405739] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.416069] env[62346]: DEBUG oslo_vmware.api [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Task: {'id': task-4891686, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.700821] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1187.701096] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Creating directory with path [datastore2] vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.701354] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0042494e-45e5-489c-aa5c-eb50cb4bb197 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.715991] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Created directory with path [datastore2] vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.716458] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Fetch image to [datastore2] vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1187.716458] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1187.717221] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969a72aa-ac1d-4211-bcf9-888e963ef828 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.726595] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a453b5ec-48ad-4efe-a66e-2343a6f82be5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.737418] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50a69e3-d687-4245-8c66-ed170de59ab8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.773611] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c13b75f4-9f71-42ac-bc43-9c27edc9f60f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.781229] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dde272c2-799b-4fbf-b9ea-8c4f88652cd1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.807345] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1187.924534] env[62346]: DEBUG oslo_vmware.api [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Task: {'id': task-4891686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0454} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.924897] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.925127] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1187.925288] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1187.925463] env[62346]: INFO nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Took 0.58 seconds to destroy the instance on the hypervisor. [ 1187.925710] env[62346]: DEBUG oslo.service.loopingcall [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1187.925960] env[62346]: DEBUG nova.compute.manager [-] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network deallocation for instance since networking was not requested.
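
For the parallel build of instance 3bca1346-07e6-4514-8ea0-5783b9640849, the records above and at the start of this stretch show the image-fetch sequence: FileManager.MakeDirectory creates a vmware_temp/<upload-uuid>/<image-id> scratch directory on the datastore, and the Glance image is downloaded there as tmp-sparse.vmdk before being copied into the per-image cache. An illustrative reconstruction of that path layout follows; the helper and parameter names are invented for this sketch, only the layout itself comes from the log lines.

    import uuid

    def tmp_fetch_path(datastore, image_id, upload_id=None):
        """Temporary location the sparse image is downloaded to before caching."""
        upload_id = upload_id or str(uuid.uuid4())
        return f"[{datastore}] vmware_temp/{upload_id}/{image_id}/tmp-sparse.vmdk"

    print(tmp_fetch_path("datastore2",
                         "9feb52a6-5366-4257-bc23-471887ce1370",
                         upload_id="5cbdbd2c-5dd4-4654-9847-f4f697b233ae"))
    # -> [datastore2] vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk
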
{{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1187.929645] env[62346]: DEBUG nova.compute.claims [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1187.929645] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.929645] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.967995] env[62346]: DEBUG nova.network.neutron [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.989518] env[62346]: INFO nova.compute.manager [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Took 0.61 seconds to deallocate network for instance. [ 1188.007870] env[62346]: DEBUG oslo_vmware.rw_handles [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1188.083521] env[62346]: DEBUG oslo_vmware.rw_handles [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1188.083710] env[62346]: DEBUG oslo_vmware.rw_handles [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
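
The rw_handles records above stream the 21,318,656-byte image straight to the datastore over HTTPS: ESX/vCenter expose datastore files under /folder/<path>, selected by the dcPath and dsName query parameters, with the writer authenticating via the generic service ticket acquired earlier through SessionManager.AcquireGenericServiceTicket. Below is a hedged sketch of that upload shape, assuming the vmware_cgi_ticket cookie form; the cookie detail, host, path, and ticket values are assumptions for illustration, not values to reuse.

    import requests

    def upload_to_datastore(host, ds_path, dc_path, ds_name, data, ticket):
        """Sketch: PUT https://<host>/folder/<path>?dcPath=<dc>&dsName=<datastore>."""
        resp = requests.put(
            f"https://{host}/folder/{ds_path}",
            params={"dcPath": dc_path, "dsName": ds_name},
            data=data,  # bytes or a file-like iterator, streamed to the datastore
            headers={"Cookie": f"vmware_cgi_ticket={ticket}"},  # assumed ticket form
        )
        resp.raise_for_status()
        return resp.status_code
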
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1188.150036] env[62346]: INFO nova.scheduler.client.report [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Deleted allocations for instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 [ 1188.179415] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9b083a5a-e3e3-412c-ad45-5d72488faf82 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 616.175s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.180713] env[62346]: DEBUG oslo_concurrency.lockutils [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 417.459s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.180929] env[62346]: DEBUG oslo_concurrency.lockutils [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Acquiring lock "8e698dc0-2883-4987-8baa-f5b6b43fff06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.181928] env[62346]: DEBUG oslo_concurrency.lockutils [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.181928] env[62346]: DEBUG oslo_concurrency.lockutils [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.186913] env[62346]: INFO nova.compute.manager [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Terminating instance [ 1188.190277] env[62346]: DEBUG nova.compute.manager [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Start destroying the instance on the hypervisor.
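
The lockutils lines above carry useful latency data: each acquire/release records how long the caller waited and how long the lock was held (here the build lock was held 616.175s while do_terminate_instance waited 417.459s for it). A small parser, written for this excerpt rather than taken from Nova, can pull those figures out of a log slice for analysis:

    import re

    LOCK_RE = re.compile(
        r'Lock "(?P<name>[^"]+)".*?(?P<verb>waited|held) (?P<secs>\d+\.\d+)s')

    def lock_timings(lines):
        """Yield (lock name, waited|held, seconds) for each lockutils record."""
        for line in lines:
            match = LOCK_RE.search(line)
            if match:
                yield (match.group('name'), match.group('verb'),
                       float(match.group('secs')))

    sample = ['Lock "compute_resources" "released" by '
              '"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim"'
              ' :: held 0.508s']
    print(list(lock_timings(sample)))   # [('compute_resources', 'held', 0.508)]
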
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1188.190277] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1188.190277] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e7a0410-f2a0-4af3-a6b2-aeed417c1236 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.192941] env[62346]: DEBUG nova.compute.manager [None req-57f9c5d6-72be-40d4-9fb5-eb719be663ae tempest-ServerExternalEventsTest-714518617 tempest-ServerExternalEventsTest-714518617-project-member] [instance: 723d75ac-364c-4e21-a664-879e99839c16] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1188.205737] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a6cfeb-bbf2-4e19-9b45-dc3dc66717c6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.227542] env[62346]: DEBUG nova.compute.manager [None req-57f9c5d6-72be-40d4-9fb5-eb719be663ae tempest-ServerExternalEventsTest-714518617 tempest-ServerExternalEventsTest-714518617-project-member] [instance: 723d75ac-364c-4e21-a664-879e99839c16] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1188.242263] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e698dc0-2883-4987-8baa-f5b6b43fff06 could not be found. [ 1188.242531] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1188.242623] env[62346]: INFO nova.compute.manager [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1188.242844] env[62346]: DEBUG oslo.service.loopingcall [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1188.248688] env[62346]: DEBUG nova.compute.manager [-] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1188.248688] env[62346]: DEBUG nova.network.neutron [-] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1188.259141] env[62346]: DEBUG oslo_concurrency.lockutils [None req-57f9c5d6-72be-40d4-9fb5-eb719be663ae tempest-ServerExternalEventsTest-714518617 tempest-ServerExternalEventsTest-714518617-project-member] Lock "723d75ac-364c-4e21-a664-879e99839c16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 215.502s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.274409] env[62346]: DEBUG nova.compute.manager [None req-dd52e83f-c72b-4f69-811c-87734aa0aabd tempest-ServersListShow296Test-1218146697 tempest-ServersListShow296Test-1218146697-project-member] [instance: 67812f8f-9aee-42ac-b22e-5761240cbb7c] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1188.289192] env[62346]: DEBUG nova.network.neutron [-] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.303228] env[62346]: INFO nova.compute.manager [-] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] Took 0.06 seconds to deallocate network for instance. [ 1188.314319] env[62346]: DEBUG nova.compute.manager [None req-dd52e83f-c72b-4f69-811c-87734aa0aabd tempest-ServersListShow296Test-1218146697 tempest-ServersListShow296Test-1218146697-project-member] [instance: 67812f8f-9aee-42ac-b22e-5761240cbb7c] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1188.341396] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dd52e83f-c72b-4f69-811c-87734aa0aabd tempest-ServersListShow296Test-1218146697 tempest-ServersListShow296Test-1218146697-project-member] Lock "67812f8f-9aee-42ac-b22e-5761240cbb7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.456s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.354804] env[62346]: DEBUG nova.compute.manager [None req-e0be7558-3f95-4314-91f6-692effb8a4f6 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 29516cd3-7d37-40d0-9f8a-9215ae69938b] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1188.384512] env[62346]: DEBUG nova.compute.manager [None req-e0be7558-3f95-4314-91f6-692effb8a4f6 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 29516cd3-7d37-40d0-9f8a-9215ae69938b] Instance disappeared before build.
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1188.416166] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e0be7558-3f95-4314-91f6-692effb8a4f6 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "29516cd3-7d37-40d0-9f8a-9215ae69938b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.626s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.420821] env[62346]: DEBUG oslo_concurrency.lockutils [None req-035d260d-a88c-4ad6-a058-3492432e6814 tempest-ServerActionsTestOtherA-150056699 tempest-ServerActionsTestOtherA-150056699-project-member] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.240s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.422227] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 56.745s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.422472] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8e698dc0-2883-4987-8baa-f5b6b43fff06] During sync_power_state the instance has a pending task (deleting). Skip. [ 1188.422696] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "8e698dc0-2883-4987-8baa-f5b6b43fff06" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.433506] env[62346]: DEBUG nova.compute.manager [None req-08082b56-418b-48fa-9db9-e8b4e718ff54 tempest-VolumesAdminNegativeTest-1461191165 tempest-VolumesAdminNegativeTest-1461191165-project-member] [instance: bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1188.458812] env[62346]: DEBUG nova.compute.manager [None req-08082b56-418b-48fa-9db9-e8b4e718ff54 tempest-VolumesAdminNegativeTest-1461191165 tempest-VolumesAdminNegativeTest-1461191165-project-member] [instance: bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5] Instance disappeared before build.
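
The _sync_power_states records above show the periodic task backing off: the instance still has a pending task (deleting), so the power-state sync skips it rather than race the in-flight delete. A minimal sketch of that guard follows, with a hypothetical Instance stand-in rather than Nova's actual objects.

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Instance:
        uuid: str
        task_state: Optional[str]   # e.g. 'deleting', or None when idle

    def query_driver_power_state_and_sync(instance: Instance) -> str:
        if instance.task_state is not None:
            # mirrors: "During sync_power_state the instance has a pending
            # task (deleting). Skip."
            return f"skip: pending task ({instance.task_state})"
        return "compare and sync power state with the hypervisor"

    print(query_driver_power_state_and_sync(
        Instance("8e698dc0-2883-4987-8baa-f5b6b43fff06", "deleting")))
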
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1188.486922] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e625d1d0-3575-41aa-bf46-d492f23efd64 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.494501] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed9f07a-e17b-4087-8406-3164d8982223 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.530125] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4187d65a-3679-412c-a415-702c9ecc22ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.539812] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16443d09-1c97-4734-a811-279fba676d95 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.545030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-08082b56-418b-48fa-9db9-e8b4e718ff54 tempest-VolumesAdminNegativeTest-1461191165 tempest-VolumesAdminNegativeTest-1461191165-project-member] Lock "bf689d0b-7ad8-47f0-9c00-8bf6d695dbf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 209.052s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.565225] env[62346]: DEBUG nova.compute.provider_tree [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.568448] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Starting instance...
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1188.579421] env[62346]: DEBUG nova.scheduler.client.report [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1188.599192] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.670s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.599975] env[62346]: ERROR nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Traceback (most recent call last): [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] result = getattr(controller, method)(*args, **kwargs) [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._get(image_id) [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1188.599975] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] resp, body = self.http_client.get(url, headers=header) [ 1188.600668] env[62346]: 
ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.request(url, 'GET', **kwargs) [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._handle_response(resp) [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise exc.from_response(resp, resp.content) [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] During handling of the above exception, another exception occurred: [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1188.600668] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Traceback (most recent call last): [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self.driver.spawn(context, instance, image_meta, [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._fetch_image_if_missing(context, vi) [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] image_fetch(context, vi, tmp_image_ds_loc) [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 
88293424-7e33-4c64-ac8a-fc1f5494f01d] images.fetch_image( [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] metadata = IMAGE_API.get(context, image_ref) [ 1188.601062] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return session.show(context, image_id, [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] _reraise_translated_image_exception(image_id) [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise new_exc.with_traceback(exc_trace) [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] result = getattr(controller, method)(*args, **kwargs) [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._get(image_id) [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1188.601467] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] resp, body = self.http_client.get(url, headers=header) [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.request(url, 'GET', **kwargs) [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self._handle_response(resp) [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise exc.from_response(resp, resp.content) [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1188.601863] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1188.602205] env[62346]: DEBUG nova.compute.utils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1188.602907] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Build of instance 88293424-7e33-4c64-ac8a-fc1f5494f01d was re-scheduled: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1188.603222] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1188.603471] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.603620] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquired lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.603781] env[62346]: DEBUG nova.network.neutron [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1188.636625] env[62346]: DEBUG nova.network.neutron [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 
88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1188.641379] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.641606] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.643318] env[62346]: INFO nova.compute.claims [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.743035] env[62346]: DEBUG nova.network.neutron [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.752682] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Releasing lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.753032] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1188.753161] env[62346]: DEBUG nova.compute.manager [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Skipping network deallocation for instance since networking was not requested.
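
The second failure traceback above (the ImageNotAuthorized re-schedule) is an exception-translation chain: glanceclient raises HTTPUnauthorized on the 401, and nova/image/glance.py re-raises it as the Nova-level ImageNotAuthorized while preserving the original traceback via with_traceback, which is why both exceptions appear in the log. A self-contained sketch of that idiom; the exception classes below are local stand-ins, not Nova's or glanceclient's.

    import sys

    class HTTPUnauthorized(Exception):
        pass

    class ImageNotAuthorized(Exception):
        pass

    def show(image_id):
        try:
            raise HTTPUnauthorized("HTTP 401 Unauthorized")
        except HTTPUnauthorized:
            exc_type, exc_value, exc_trace = sys.exc_info()
            new_exc = ImageNotAuthorized(f"Not authorized for image {image_id}.")
            # same idiom as _reraise_translated_image_exception at glance.py:1032
            raise new_exc.with_traceback(exc_trace)

    try:
        show("9feb52a6-5366-4257-bc23-471887ce1370")
    except ImageNotAuthorized as exc:
        print(exc)

Because the re-raise happens inside an except block, Python chains the two exceptions, producing the "During handling of the above exception, another exception occurred" separator seen in the trace.
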
{{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1188.897041] env[62346]: INFO nova.scheduler.client.report [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Deleted allocations for instance 88293424-7e33-4c64-ac8a-fc1f5494f01d [ 1188.926911] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fe475826-7cd1-44fb-b0cd-f72511cf10f2 tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 616.630s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.927803] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 417.562s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.927923] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "88293424-7e33-4c64-ac8a-fc1f5494f01d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.928157] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.928330] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.931656] env[62346]: INFO nova.compute.manager [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Terminating instance [ 1188.932954] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquiring lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.933140] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f
tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Acquired lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.933319] env[62346]: DEBUG nova.network.neutron [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1188.979356] env[62346]: DEBUG nova.network.neutron [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1188.981405] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1189.059243] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.119596] env[62346]: DEBUG nova.network.neutron [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.128955] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65206d6-e4a2-453e-a2b4-2d4ff30016b2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.133257] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Releasing lock "refresh_cache-88293424-7e33-4c64-ac8a-fc1f5494f01d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.133701] env[62346]: DEBUG nova.compute.manager [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1189.133889] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1189.136586] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8ff0d84-3dff-4042-9403-97c6ef9c8820 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.140732] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806d02f6-995e-4f3e-b922-a8f3e3c08f36 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.176830] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac9b723-718e-485b-bad8-655c2be6c2fb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.188858] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c19fcf0-7f7f-48cd-a976-c164b9a3d928 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.197503] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a064b5-bfed-4bff-8736-b175c7ac0ef7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.211715] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88293424-7e33-4c64-ac8a-fc1f5494f01d could not be found. [ 1189.212510] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1189.212510] env[62346]: INFO nova.compute.manager [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1189.212510] env[62346]: DEBUG oslo.service.loopingcall [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1189.220225] env[62346]: DEBUG nova.compute.manager [-] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1189.220396] env[62346]: DEBUG nova.network.neutron [-] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1189.223028] env[62346]: DEBUG nova.compute.provider_tree [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.231417] env[62346]: DEBUG nova.scheduler.client.report [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1189.263538] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.620s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.263538] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1189.267856] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.208s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.269262] env[62346]: INFO nova.compute.claims [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1189.329283] env[62346]: DEBUG nova.compute.utils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1189.333578] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Not allocating networking since 'none' was specified. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1189.343781] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1189.389126] env[62346]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62346) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1189.389362] env[62346]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1189.391034] env[62346]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-b7ba72ef-4b9e-4aec-be7f-a80062e7c3f8'] [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1189.391034] env[62346]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1189.391619] env[62346]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1189.391619] env[62346]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1189.394021] env[62346]: ERROR oslo.service.loopingcall [ 1189.394725] env[62346]: ERROR nova.compute.manager [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1189.440366] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1189.446261] env[62346]: ERROR nova.compute.manager [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Traceback (most recent call last): [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] ret = obj(*args, **kwargs) [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] exception_handler_v20(status_code, error_body) [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise client_exc(message=error_message, [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Neutron server returns request_ids: ['req-b7ba72ef-4b9e-4aec-be7f-a80062e7c3f8'] [ 1189.446261] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] During handling of the above exception, another exception occurred: [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Traceback (most recent call last): [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._delete_instance(context, instance, bdms) [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._shutdown_instance(context, instance, bdms) [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1189.446586] env[62346]: ERROR 
nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._try_deallocate_network(context, instance, requested_networks) [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] with excutils.save_and_reraise_exception(): [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1189.446586] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self.force_reraise() [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise self.value [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] _deallocate_network_with_retries() [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return evt.wait() [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] result = hub.switch() [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.greenlet.switch() [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1189.446941] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] result = func(*self.args, **self.kw) [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] result = f(*args, **kwargs) [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._deallocate_network( [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 
88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self.network_api.deallocate_for_instance( [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] data = neutron.list_ports(**search_opts) [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] ret = obj(*args, **kwargs) [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.list('ports', self.ports_path, retrieve_all, [ 1189.447301] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] ret = obj(*args, **kwargs) [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] for r in self._pagination(collection, path, **params): [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] res = self.get(path, params=params) [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] ret = obj(*args, **kwargs) [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.retry_request("GET", action, body=body, [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] ret = obj(*args, **kwargs) [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1189.447702] env[62346]: ERROR nova.compute.manager [instance: 
88293424-7e33-4c64-ac8a-fc1f5494f01d] return self.do_request(method, action, body=body, [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] ret = obj(*args, **kwargs) [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] self._handle_fault_response(status_code, replybody, resp) [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1189.448118] env[62346]: ERROR nova.compute.manager [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] [ 1189.472772] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=<?>,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T11:25:33Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1189.473041] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1189.473220] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1189.473400] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1189.473542] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614
tempest-ServerShowV254Test-1955341614-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1189.473683] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1189.473884] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1189.474047] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1189.474212] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1189.474370] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1189.474537] env[62346]: DEBUG nova.virt.hardware [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1189.475431] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a1aa17-3013-4ab0-8633-7e55d7da9578 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.482087] env[62346]: DEBUG oslo_concurrency.lockutils [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.554s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.485914] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 57.808s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.485914] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d]
During sync_power_state the instance has a pending task (deleting). Skip. [ 1189.485914] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "88293424-7e33-4c64-ac8a-fc1f5494f01d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.489867] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f60dfb-7711-4246-aadc-40d3376e44dd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.511576] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance VIF info [] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1189.519297] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Creating folder: Project (4bf15abf99ca484a9e823e9cf6595cd3). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1189.523961] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8cf8e3e4-f80f-46ff-80f0-e04759d1ccfc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.539580] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Created folder: Project (4bf15abf99ca484a9e823e9cf6595cd3) in parent group-v953204. [ 1189.539854] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Creating folder: Instances. Parent ref: group-v953273. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1189.540202] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3896f0c6-c890-415a-88d0-356937f7be52 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.549751] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Created folder: Instances in parent group-v953273. [ 1189.550183] env[62346]: DEBUG oslo.service.loopingcall [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1189.550183] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1189.550390] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0945fe6-2704-4e75-a884-ffd4d8612f76 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.571477] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1189.571477] env[62346]: value = "task-4891689" [ 1189.571477] env[62346]: _type = "Task" [ 1189.571477] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.578105] env[62346]: INFO nova.compute.manager [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] [instance: 88293424-7e33-4c64-ac8a-fc1f5494f01d] Successfully reverted task state from None on failure for instance. [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server [None req-259b5eaf-a13b-4f0e-b384-0982dcab997f tempest-ServerDiagnosticsV248Test-1555638860 tempest-ServerDiagnosticsV248Test-1555638860-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-b7ba72ef-4b9e-4aec-be7f-a80062e7c3f8'] [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1189.584631] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1189.585309] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1189.586068] env[62346]: ERROR 
oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1189.586068] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1189.586651] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1189.587126] env[62346]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1189.587126] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1189.587623] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1189.588124] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1189.588124] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1189.588124] env[62346]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1189.588124] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1189.588124] env[62346]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1189.588124] env[62346]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1189.588124] env[62346]: ERROR oslo_messaging.rpc.server [ 1189.588737] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891689, 'name': CreateVM_Task} progress is 6%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.738685] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c609614f-c6d0-488f-aaaa-5f4864633e33 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.747731] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f432f51-1b47-4ebf-8491-1a1e22a0b2c6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.784259] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5077de-f941-42d9-ace2-6933a6c82752 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.793206] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7dc1e83-c18d-46d0-9d20-bec685a67366 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.809764] env[62346]: DEBUG nova.compute.provider_tree [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.822130] env[62346]: DEBUG nova.scheduler.client.report [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1189.837743] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.571s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.838713] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1189.891132] env[62346]: DEBUG nova.compute.utils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1189.892888] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1189.893201] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1189.902140] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1189.974749] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1190.005389] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=<?>,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T11:25:33Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1190.005763] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1190.005990] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1190.008039] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1190.008039] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1190.008039] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1190.008039] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1190.008357] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1190.008506] env[62346]: DEBUG nova.virt.hardware [None
req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1190.008705] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1190.008915] env[62346]: DEBUG nova.virt.hardware [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1190.009813] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5bf675-9fe0-4e1e-b27d-b5097e69e809 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.019833] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4106db8-ebaf-490e-aac5-cb80d2866090 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.081632] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891689, 'name': CreateVM_Task, 'duration_secs': 0.288037} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.081821] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1190.082292] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.082454] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.082779] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1190.083602] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f177917-46cf-4684-b036-f25f5f631a18 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.089081] env[62346]: DEBUG oslo_vmware.api [None 
req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Waiting for the task: (returnval){ [ 1190.089081] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52785315-9a1d-50b9-4d00-c5554d990f06" [ 1190.089081] env[62346]: _type = "Task" [ 1190.089081] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.099953] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52785315-9a1d-50b9-4d00-c5554d990f06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.212865] env[62346]: DEBUG nova.policy [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ca3ec78503e4129bcd1af130d7812df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed41fdcbed524645bc79ee368edf832b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1190.603359] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.603359] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1190.603359] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.187094] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Successfully created port: 2739e9d3-195c-4eb7-addf-5d30f54391f8 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1191.534035] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.751568] env[62346]: DEBUG nova.compute.manager [req-0fb87b2b-15a0-4301-8e78-ac2a84619385 req-94f4d987-d90b-4321-9d5a-f16c5a45cb81 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Received event network-vif-plugged-2739e9d3-195c-4eb7-addf-5d30f54391f8 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1192.751882] env[62346]: DEBUG oslo_concurrency.lockutils [req-0fb87b2b-15a0-4301-8e78-ac2a84619385 req-94f4d987-d90b-4321-9d5a-f16c5a45cb81 service nova] Acquiring lock "da750b9f-b4d7-4c55-acfc-289222af9067-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.753780] env[62346]: DEBUG oslo_concurrency.lockutils [req-0fb87b2b-15a0-4301-8e78-ac2a84619385 req-94f4d987-d90b-4321-9d5a-f16c5a45cb81 service nova] Lock "da750b9f-b4d7-4c55-acfc-289222af9067-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.753780] env[62346]: DEBUG oslo_concurrency.lockutils [req-0fb87b2b-15a0-4301-8e78-ac2a84619385 req-94f4d987-d90b-4321-9d5a-f16c5a45cb81 service nova] Lock "da750b9f-b4d7-4c55-acfc-289222af9067-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.753916] env[62346]: DEBUG nova.compute.manager [req-0fb87b2b-15a0-4301-8e78-ac2a84619385 req-94f4d987-d90b-4321-9d5a-f16c5a45cb81 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] No waiting events found dispatching network-vif-plugged-2739e9d3-195c-4eb7-addf-5d30f54391f8 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1192.754551] env[62346]: WARNING nova.compute.manager [req-0fb87b2b-15a0-4301-8e78-ac2a84619385 req-94f4d987-d90b-4321-9d5a-f16c5a45cb81 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Received unexpected event network-vif-plugged-2739e9d3-195c-4eb7-addf-5d30f54391f8 for instance with vm_state building and task_state spawning. 
[ 1192.917308] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Successfully updated port: 2739e9d3-195c-4eb7-addf-5d30f54391f8 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1192.929013] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "refresh_cache-da750b9f-b4d7-4c55-acfc-289222af9067" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.929214] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquired lock "refresh_cache-da750b9f-b4d7-4c55-acfc-289222af9067" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.929381] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1193.040424] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1193.405713] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Updating instance_info_cache with network_info: [{"id": "2739e9d3-195c-4eb7-addf-5d30f54391f8", "address": "fa:16:3e:3d:cd:03", "network": {"id": "a208d1e4-7d83-4a59-a29f-ee586478324b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-271434053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed41fdcbed524645bc79ee368edf832b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2739e9d3-19", "ovs_interfaceid": "2739e9d3-195c-4eb7-addf-5d30f54391f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.420476] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Releasing lock "refresh_cache-da750b9f-b4d7-4c55-acfc-289222af9067" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.421257] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Instance network_info: |[{"id": "2739e9d3-195c-4eb7-addf-5d30f54391f8", "address": "fa:16:3e:3d:cd:03", "network": {"id": "a208d1e4-7d83-4a59-a29f-ee586478324b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-271434053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed41fdcbed524645bc79ee368edf832b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2739e9d3-19", "ovs_interfaceid": "2739e9d3-195c-4eb7-addf-5d30f54391f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1193.421412] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:cd:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2739e9d3-195c-4eb7-addf-5d30f54391f8', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1193.429114] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Creating folder: Project (ed41fdcbed524645bc79ee368edf832b). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1193.429779] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-648ad6c9-1fbe-4bb6-9ee3-2bd9bb6021c1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.442613] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Created folder: Project (ed41fdcbed524645bc79ee368edf832b) in parent group-v953204. [ 1193.443335] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Creating folder: Instances. Parent ref: group-v953279. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1193.443335] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5036a982-76a0-4a05-a887-02cee73ce9fa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.453674] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Created folder: Instances in parent group-v953279. [ 1193.453916] env[62346]: DEBUG oslo.service.loopingcall [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1193.454164] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1193.454408] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-403d36ec-68b5-4752-9b6f-9a4d5e5b6c56 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.475159] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1193.475159] env[62346]: value = "task-4891695" [ 1193.475159] env[62346]: _type = "Task" [ 1193.475159] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.483697] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891695, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.985665] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891695, 'name': CreateVM_Task, 'duration_secs': 0.342854} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.985903] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1193.995861] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.995963] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.996301] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1193.997336] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9977d7d9-2892-4166-905b-7ef1a5d623c7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.004073] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Waiting for the task: (returnval){ [ 1194.004073] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52783e2d-2d3e-ec66-017c-ef501a0cd443" [ 1194.004073] env[62346]: _type = "Task" [ 1194.004073] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.015759] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52783e2d-2d3e-ec66-017c-ef501a0cd443, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.219150] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "c6d55895-0a7a-4088-a065-3337c6045878" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.219607] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "c6d55895-0a7a-4088-a065-3337c6045878" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.518799] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.519444] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1194.520011] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.674532] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "da750b9f-b4d7-4c55-acfc-289222af9067" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.807688] env[62346]: DEBUG nova.compute.manager [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Received event network-changed-2739e9d3-195c-4eb7-addf-5d30f54391f8 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1194.807892] env[62346]: DEBUG nova.compute.manager [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Refreshing instance network info cache due to event network-changed-2739e9d3-195c-4eb7-addf-5d30f54391f8. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1194.808028] env[62346]: DEBUG oslo_concurrency.lockutils [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] Acquiring lock "refresh_cache-da750b9f-b4d7-4c55-acfc-289222af9067" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.808227] env[62346]: DEBUG oslo_concurrency.lockutils [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] Acquired lock "refresh_cache-da750b9f-b4d7-4c55-acfc-289222af9067" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.808652] env[62346]: DEBUG nova.network.neutron [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Refreshing network info cache for port 2739e9d3-195c-4eb7-addf-5d30f54391f8 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1195.448952] env[62346]: DEBUG nova.network.neutron [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Updated VIF entry in instance network info cache for port 2739e9d3-195c-4eb7-addf-5d30f54391f8. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1195.450122] env[62346]: DEBUG nova.network.neutron [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Updating instance_info_cache with network_info: [{"id": "2739e9d3-195c-4eb7-addf-5d30f54391f8", "address": "fa:16:3e:3d:cd:03", "network": {"id": "a208d1e4-7d83-4a59-a29f-ee586478324b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-271434053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed41fdcbed524645bc79ee368edf832b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2739e9d3-19", "ovs_interfaceid": "2739e9d3-195c-4eb7-addf-5d30f54391f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.460880] env[62346]: DEBUG oslo_concurrency.lockutils [req-57141dca-3aba-469c-a25c-a1e0e71e797f req-74202980-eddc-4a85-b953-22d9c65fd103 service nova] Releasing lock "refresh_cache-da750b9f-b4d7-4c55-acfc-289222af9067" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.177924] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1989792c-b527-472e-8e74-48b58df653e5 tempest-ServerGroupTestJSON-1718991290 tempest-ServerGroupTestJSON-1718991290-project-member] Acquiring 
lock "3027e833-8cb3-4ace-bc05-f8370630e1ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.178401] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1989792c-b527-472e-8e74-48b58df653e5 tempest-ServerGroupTestJSON-1718991290 tempest-ServerGroupTestJSON-1718991290-project-member] Lock "3027e833-8cb3-4ace-bc05-f8370630e1ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.268592] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f903f639-22b7-4861-990d-2bb05344f63f tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "c64fd0a9-d455-448f-bb53-82999adccf14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.268937] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f903f639-22b7-4861-990d-2bb05344f63f tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "c64fd0a9-d455-448f-bb53-82999adccf14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.965188] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f8ae78c6-5747-4722-b466-e7d3100e1dbe tempest-ServerActionsV293TestJSON-384051578 tempest-ServerActionsV293TestJSON-384051578-project-member] Acquiring lock "3c523404-52dd-4248-bc92-a2d67b03009e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.965524] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f8ae78c6-5747-4722-b466-e7d3100e1dbe tempest-ServerActionsV293TestJSON-384051578 tempest-ServerActionsV293TestJSON-384051578-project-member] Lock "3c523404-52dd-4248-bc92-a2d67b03009e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.220669] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.232831] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.233070] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.233247] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.233405] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1221.234621] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3bd71c-d56f-447e-826d-a0f699944d30 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.244596] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d87347-b910-4c4e-b205-65a83bd3ce96 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.259785] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfb265f-7ed5-45fc-aadd-63d335a9a6e4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.267198] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27e1e4d-9357-4e2e-b818-2efa7a8e4566 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.298569] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180578MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1221.298728] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.298904] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.377840] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3bca1346-07e6-4514-8ea0-5783b9640849 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378014] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378150] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378270] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378389] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378513] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378623] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378739] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.378853] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.379031] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.390880] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.401698] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0e30d5a5-0c28-411a-b0fd-8385d86323c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.413547] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 793e37e0-7e21-49f9-aaf2-44a42af1f4b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.425942] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 55fe8916-2b2f-4912-ae6b-4a06db60fedc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.439124] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.449497] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.460869] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.471759] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.482863] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3027e833-8cb3-4ace-bc05-f8370630e1ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.495428] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c64fd0a9-d455-448f-bb53-82999adccf14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.507714] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3c523404-52dd-4248-bc92-a2d67b03009e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.507714] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1221.507714] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '52', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_9ea2f35bc89d45a4ade06ca64f5249f7': '1', 'io_workload': '10', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1', 'num_proj_04179eaf0efd4e43a9d4eb1445ffc270': '1', 'num_proj_a625cf2514b140fcb029253fd85f6ee8': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_4bf15abf99ca484a9e823e9cf6595cd3': '1', 'num_proj_ed41fdcbed524645bc79ee368edf832b': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1221.851811] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4fbd07-3299-4714-9677-3822507c587a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.859978] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07c2c3c-d829-4685-a70f-ba80e3aef664 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.906411] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ef30d6-96b2-4903-b409-27929984866b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.918851] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6906bb66-b5d5-4e16-b578-8dd4c2db7e29 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.934461] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.945575] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1221.964084] env[62346]: DEBUG nova.compute.resource_tracker [None 
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1221.964217] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.665s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.964517] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.964838] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1224.964884] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1224.988721] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.988721] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.988721] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.988721] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.988721] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.989050] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.989050] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.989050] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.989050] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.989050] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.989452] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1224.989452] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.219384] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.215095] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.219842] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.215846] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.245241] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.245241] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.245241] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1230.220931] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.008835] env[62346]: DEBUG oslo_concurrency.lockutils [None req-019e3cef-920e-4a24-aba8-da18deeeefa7 tempest-ServerRescueTestJSONUnderV235-462403715 tempest-ServerRescueTestJSONUnderV235-462403715-project-member] Acquiring lock "00208615-17d0-4e20-b1e9-80819181109e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.009165] env[62346]: DEBUG oslo_concurrency.lockutils [None req-019e3cef-920e-4a24-aba8-da18deeeefa7 tempest-ServerRescueTestJSONUnderV235-462403715 tempest-ServerRescueTestJSONUnderV235-462403715-project-member] Lock "00208615-17d0-4e20-b1e9-80819181109e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.102783] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2fdf0690-dbd7-4f5f-b2be-8889bfba7c88 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Acquiring lock "8f0203a5-3dc5-4a2d-9a96-07bed465d1eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.103012] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2fdf0690-dbd7-4f5f-b2be-8889bfba7c88 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Lock "8f0203a5-3dc5-4a2d-9a96-07bed465d1eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.849025] env[62346]: WARNING oslo_vmware.rw_handles [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1235.849025] env[62346]: ERROR 
oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1235.849025] env[62346]: ERROR oslo_vmware.rw_handles [ 1235.849872] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1235.851760] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1235.852017] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Copying Virtual Disk [datastore2] vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/5cbdbd2c-5dd4-4654-9847-f4f697b233ae/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1235.852322] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed1926d5-e10b-4c8d-b494-65929fab5aee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.860997] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Waiting for the task: (returnval){ [ 1235.860997] env[62346]: value = "task-4891703" [ 1235.860997] env[62346]: _type = "Task" [ 1235.860997] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.869483] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Task: {'id': task-4891703, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.372103] env[62346]: DEBUG oslo_vmware.exceptions [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1236.372394] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.373063] env[62346]: ERROR nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1236.373063] env[62346]: Faults: ['InvalidArgument'] [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Traceback (most recent call last): [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] yield resources [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self.driver.spawn(context, instance, image_meta, [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self._fetch_image_if_missing(context, vi) [ 1236.373063] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] image_cache(vi, tmp_image_ds_loc) [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] vm_util.copy_virtual_disk( [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] session._wait_for_task(vmdk_copy_task) [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] return self.wait_for_task(task_ref) [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] return evt.wait() [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] result = hub.switch() [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1236.373523] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] return self.greenlet.switch() [ 1236.373949] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1236.373949] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self.f(*self.args, **self.kw) [ 1236.373949] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1236.373949] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] raise exceptions.translate_fault(task_info.error) [ 1236.373949] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1236.373949] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Faults: ['InvalidArgument'] [ 1236.373949] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] [ 1236.373949] env[62346]: INFO nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Terminating instance [ 1236.374966] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.375182] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.379071] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 
tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1236.379071] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1236.379071] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4713d46d-18f1-4d90-a9f9-0fa351a94693 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.379071] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f3bc73-a9cb-43eb-8d3a-fd1822f4ff60 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.387251] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1236.387475] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4328e88-6c2e-4a6c-a1b2-f50eda4989a6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.389924] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.390112] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1236.391371] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73ebf408-dc21-43f9-9ae6-4a98fe63e743 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.396442] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Waiting for the task: (returnval){ [ 1236.396442] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52029ad7-27b3-a9dc-aa80-5e1079ada659" [ 1236.396442] env[62346]: _type = "Task" [ 1236.396442] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.404294] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52029ad7-27b3-a9dc-aa80-5e1079ada659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.459066] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1236.459368] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1236.459592] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Deleting the datastore file [datastore2] 3bca1346-07e6-4514-8ea0-5783b9640849 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.459873] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e47092c7-6bdf-41af-81a1-c51c3a5e8b36 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.467347] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Waiting for the task: (returnval){ [ 1236.467347] env[62346]: value = "task-4891705" [ 1236.467347] env[62346]: _type = "Task" [ 1236.467347] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.476427] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Task: {'id': task-4891705, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.909249] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1236.909567] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Creating directory with path [datastore2] vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.909749] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-509da9c1-7a57-4c54-abb3-e26adc2d9918 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.922239] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Created directory with path [datastore2] vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.922442] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Fetch image to [datastore2] vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1236.922667] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1236.923861] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd377dc4-6aca-4d34-8961-cd27733898b7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.931872] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed1d9c7-48c0-44d5-9173-be10d10afc54 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.943188] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af874403-e904-464f-bd3b-83f697cf7504 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.985759] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca42d7c-c665-41d1-935f-54953ff0cda2 {{(pid=62346) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.996623] env[62346]: DEBUG oslo_vmware.api [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Task: {'id': task-4891705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065256} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.996623] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.996948] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1236.997153] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1237.000953] env[62346]: INFO nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Took 0.62 seconds to destroy the instance on the hypervisor. 
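The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver submits a vCenter task, then wait_for_task polls it (the "progress is 0%" lines) until it completes or errors out. A minimal client-side sketch of that flow; the vCenter host, credentials and datastore paths below are placeholders, not values from this log:

    # Sketch only: host, credentials and datastore paths are illustrative.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    disk_mgr = session.vim.service_content.virtualDiskManager
    try:
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
            destName='[datastore2] vmware_temp/example/example.vmdk')
        # Polls the task and raises once task_info.state goes to 'error'.
        session.wait_for_task(task)
    except vexc.VimFaultException as exc:
        # fault_list carries the vSphere fault names, e.g. ['InvalidArgument']
        # for the "A specified parameter was not correct: fileType" failure
        # seen above.
        print(exc.fault_list, exc)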
[ 1237.000953] env[62346]: DEBUG nova.compute.claims [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1237.000953] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.000953] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.003537] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5908154f-c1a7-42e1-bf40-414138e92b4b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.031620] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1237.095600] env[62346]: DEBUG oslo_vmware.rw_handles [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1237.163378] env[62346]: DEBUG oslo_vmware.rw_handles [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1237.163597] env[62346]: DEBUG oslo_vmware.rw_handles [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1237.432810] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110a18f8-928c-483d-ad91-67ee352944bd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.442939] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088750cc-cc16-45bc-89fb-b2e652a79cf9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.476557] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9221e264-0b75-49fb-bcdc-bea2c37b6fa3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.485354] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f88fa0f-62a1-4fef-8c3b-1011d0e3a245 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.500397] env[62346]: DEBUG nova.compute.provider_tree [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.512208] env[62346]: DEBUG nova.scheduler.client.report [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1237.527895] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.527s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.528763] env[62346]: ERROR nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1237.528763] env[62346]: Faults: ['InvalidArgument'] [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Traceback (most recent call last): [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self.driver.spawn(context, instance, image_meta, [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self._fetch_image_if_missing(context, vi) [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] image_cache(vi, tmp_image_ds_loc) [ 1237.528763] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] vm_util.copy_virtual_disk( [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] session._wait_for_task(vmdk_copy_task) [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] return self.wait_for_task(task_ref) [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] return evt.wait() [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] result = hub.switch() [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] return self.greenlet.switch() [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1237.529375] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] self.f(*self.args, **self.kw) [ 1237.529919] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1237.529919] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] raise exceptions.translate_fault(task_info.error) [ 1237.529919] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1237.529919] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Faults: ['InvalidArgument'] [ 1237.529919] env[62346]: ERROR nova.compute.manager [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] [ 1237.529919] env[62346]: DEBUG nova.compute.utils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1237.531082] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Build of instance 3bca1346-07e6-4514-8ea0-5783b9640849 was re-scheduled: A specified parameter was not correct: fileType [ 1237.531082] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1237.531470] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1237.531651] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1237.531807] env[62346]: DEBUG nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1237.531973] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1238.356778] env[62346]: DEBUG nova.network.neutron [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.375304] env[62346]: INFO nova.compute.manager [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Took 0.84 seconds to deallocate network for instance. [ 1238.533558] env[62346]: INFO nova.scheduler.client.report [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Deleted allocations for instance 3bca1346-07e6-4514-8ea0-5783b9640849 [ 1238.559303] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1f339e7f-808c-4335-bbc0-9a56c3f073e8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "3bca1346-07e6-4514-8ea0-5783b9640849" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 652.654s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.561049] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "3bca1346-07e6-4514-8ea0-5783b9640849" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 441.608s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.561294] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "3bca1346-07e6-4514-8ea0-5783b9640849-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.562151] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "3bca1346-07e6-4514-8ea0-5783b9640849-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.562489] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "3bca1346-07e6-4514-8ea0-5783b9640849-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.565270] env[62346]: INFO nova.compute.manager [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Terminating instance [ 1238.567305] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquiring lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.567464] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Acquired lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.567630] env[62346]: DEBUG nova.network.neutron [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1238.602391] env[62346]: DEBUG nova.network.neutron [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1238.606328] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: 0da3e07d-07a7-4c1a-a3aa-ae4973311d80] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1238.641947] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: 0da3e07d-07a7-4c1a-a3aa-ae4973311d80] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1238.667597] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "0da3e07d-07a7-4c1a-a3aa-ae4973311d80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 240.442s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.679322] env[62346]: DEBUG nova.compute.manager [None req-e51964d6-2612-4e81-b97f-7216f3e20d97 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: be701e07-33b0-48de-962b-5051d1c2e2ff] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1238.710575] env[62346]: DEBUG nova.compute.manager [None req-e51964d6-2612-4e81-b97f-7216f3e20d97 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: be701e07-33b0-48de-962b-5051d1c2e2ff] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1238.756892] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e51964d6-2612-4e81-b97f-7216f3e20d97 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "be701e07-33b0-48de-962b-5051d1c2e2ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 231.635s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.777624] env[62346]: DEBUG nova.compute.manager [None req-1340031c-c495-43e3-a842-0ade251347a2 tempest-ServerAddressesNegativeTestJSON-1574771368 tempest-ServerAddressesNegativeTestJSON-1574771368-project-member] [instance: dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1238.807997] env[62346]: DEBUG nova.compute.manager [None req-1340031c-c495-43e3-a842-0ade251347a2 tempest-ServerAddressesNegativeTestJSON-1574771368 tempest-ServerAddressesNegativeTestJSON-1574771368-project-member] [instance: dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1238.838827] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1340031c-c495-43e3-a842-0ade251347a2 tempest-ServerAddressesNegativeTestJSON-1574771368 tempest-ServerAddressesNegativeTestJSON-1574771368-project-member] Lock "dfb8a211-9ccc-47f8-b07f-fbdb6ccefdf6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.931s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.843260] env[62346]: DEBUG nova.network.neutron [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.855204] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Releasing lock "refresh_cache-3bca1346-07e6-4514-8ea0-5783b9640849" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.855624] env[62346]: DEBUG nova.compute.manager [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1238.855842] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1238.856421] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b9fbd32-b12a-4222-bf96-f5e462a1e138 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.859555] env[62346]: DEBUG nova.compute.manager [None req-daacd5df-44cb-44c4-8ef2-2b0301aa1846 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] [instance: 0e30d5a5-0c28-411a-b0fd-8385d86323c4] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1238.869859] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b80537-c32d-47cc-be41-1ac142207e7e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.887516] env[62346]: DEBUG nova.compute.manager [None req-daacd5df-44cb-44c4-8ef2-2b0301aa1846 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] [instance: 0e30d5a5-0c28-411a-b0fd-8385d86323c4] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1238.904200] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3bca1346-07e6-4514-8ea0-5783b9640849 could not be found. [ 1238.904478] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1238.904691] env[62346]: INFO nova.compute.manager [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1238.905019] env[62346]: DEBUG oslo.service.loopingcall [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1238.907533] env[62346]: DEBUG nova.compute.manager [-] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1238.907627] env[62346]: DEBUG nova.network.neutron [-] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1238.921813] env[62346]: DEBUG oslo_concurrency.lockutils [None req-daacd5df-44cb-44c4-8ef2-2b0301aa1846 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Lock "0e30d5a5-0c28-411a-b0fd-8385d86323c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.236s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.934811] env[62346]: DEBUG nova.compute.manager [None req-02c4ecd3-7b62-4f9b-9417-f974dd7e1ffc tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: 793e37e0-7e21-49f9-aaf2-44a42af1f4b1] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1238.939967] env[62346]: DEBUG nova.network.neutron [-] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1238.945450] env[62346]: DEBUG nova.network.neutron [-] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.960846] env[62346]: INFO nova.compute.manager [-] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] Took 0.05 seconds to deallocate network for instance. 
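The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return." entry above is emitted by oslo.service's RetryDecorator (loopingcall.py in this venv), which re-invokes a callable while it keeps raising one of a declared set of exceptions. A hedged sketch of that pattern; the exception type and retry counts here are illustrative, not Nova's actual settings:

    from oslo_service import loopingcall

    class TransientNetworkError(Exception):
        pass

    # Re-invoked with an increasing sleep (capped at max_sleep_time) while it
    # raises one of the listed exceptions, up to max_retry_count retries;
    # any other exception propagates immediately.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=5,
                                exceptions=(TransientNetworkError,))
    def deallocate_network_with_retries():
        ...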
[ 1238.969021] env[62346]: DEBUG nova.compute.manager [None req-02c4ecd3-7b62-4f9b-9417-f974dd7e1ffc tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] [instance: 793e37e0-7e21-49f9-aaf2-44a42af1f4b1] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1238.995705] env[62346]: DEBUG oslo_concurrency.lockutils [None req-02c4ecd3-7b62-4f9b-9417-f974dd7e1ffc tempest-ImagesTestJSON-315076728 tempest-ImagesTestJSON-315076728-project-member] Lock "793e37e0-7e21-49f9-aaf2-44a42af1f4b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.523s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.029199] env[62346]: DEBUG nova.compute.manager [None req-dd689a49-1ef1-40f8-8e8f-020bf16d7048 tempest-ServersNegativeTestMultiTenantJSON-1760784361 tempest-ServersNegativeTestMultiTenantJSON-1760784361-project-member] [instance: 55fe8916-2b2f-4912-ae6b-4a06db60fedc] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1239.063044] env[62346]: DEBUG nova.compute.manager [None req-dd689a49-1ef1-40f8-8e8f-020bf16d7048 tempest-ServersNegativeTestMultiTenantJSON-1760784361 tempest-ServersNegativeTestMultiTenantJSON-1760784361-project-member] [instance: 55fe8916-2b2f-4912-ae6b-4a06db60fedc] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1239.087032] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9e69c80b-aad5-4ace-8c1c-bb8cfb9df4c8 tempest-ListServerFiltersTestJSON-1096806695 tempest-ListServerFiltersTestJSON-1096806695-project-member] Lock "3bca1346-07e6-4514-8ea0-5783b9640849" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.525s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.088498] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "3bca1346-07e6-4514-8ea0-5783b9640849" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 107.410s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.088498] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 3bca1346-07e6-4514-8ea0-5783b9640849] During sync_power_state the instance has a pending task (deleting). Skip. 
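The Acquiring/acquired/released bookkeeping above, with its waited/held durations, comes from oslo.concurrency's synchronized decorator; Nova serializes per-instance operations by using the instance UUID as the lock name, which is why do_terminate_instance waited 441.608s for the build lock to be released. A minimal sketch of the pattern (the lock name is taken from the entries above; the function body is illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('3bca1346-07e6-4514-8ea0-5783b9640849')
    def do_terminate_instance():
        # Runs with the per-instance lock held; the decorator logs how long
        # the caller waited and how long the lock was held.
        pass

    do_terminate_instance()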
[ 1239.088498] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "3bca1346-07e6-4514-8ea0-5783b9640849" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.094809] env[62346]: DEBUG oslo_concurrency.lockutils [None req-dd689a49-1ef1-40f8-8e8f-020bf16d7048 tempest-ServersNegativeTestMultiTenantJSON-1760784361 tempest-ServersNegativeTestMultiTenantJSON-1760784361-project-member] Lock "55fe8916-2b2f-4912-ae6b-4a06db60fedc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 199.759s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.107314] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1239.191491] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.191729] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.193355] env[62346]: INFO nova.compute.claims [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1239.519724] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37aa3803-98dc-4f7d-a5e8-995d78aa4f07 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.527995] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f615c3-33d5-47fe-82db-394898a50932 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.567853] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b27039-b7bd-4430-8e59-ecda9b5cd8a5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.576109] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37324491-b48a-4f37-8b4d-db3d43945bc6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.592737] env[62346]: DEBUG 
nova.compute.provider_tree [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.607151] env[62346]: DEBUG nova.scheduler.client.report [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1239.623283] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.431s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.624084] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1239.685881] env[62346]: DEBUG nova.compute.utils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1239.687652] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1239.688042] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1239.698203] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Start building block device mappings for instance. 
[ 1239.818627] env[62346]: DEBUG nova.policy [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c91a168730498c90a31900a69a5d5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f45e49e839f4cafaea598ac8f5fbd2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1239.846023] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1239.873342] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=<?>,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T11:25:33Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1239.873578] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1239.873844] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1239.876145] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1239.878189] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1239.878189] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1239.878189] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1239.878189] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1239.878189] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1239.878425] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1239.878425] env[62346]: DEBUG nova.virt.hardware [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
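The topology lines above ("Flavor limits 0:0:0" through "Got 1 possible topologies") trace Nova's CPU-topology search: 0 means "no preference", so every factorization of the vCPU count under the sockets/cores/threads caps is a candidate. A rough sketch of that enumeration (my own code, not Nova's actual implementation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            rest = vcpus // s
            for c in range(1, min(rest, max_cores) + 1):
                if rest % c:
                    continue
                t = rest // c
                if t <= max_threads:
                    yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"

For the single-vCPU m1.nano flavor there is only one factorization, hence the single sorted result.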
[ 1239.878811] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766a4dd3-5c1f-45ce-b6cb-2bf65adb3586 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1239.889805] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81b834a-06bc-410a-be16-8620a1e35fec {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1240.773019] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Successfully created port: 5e0528c1-9d78-440e-9705-4b879060c95e {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1242.318537] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Successfully updated port: 5e0528c1-9d78-440e-9705-4b879060c95e {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1242.333022] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "refresh_cache-88727b37-0f05-4551-ac87-e43385e0f76d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1242.333022] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired lock "refresh_cache-88727b37-0f05-4551-ac87-e43385e0f76d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1242.333022] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1242.365678] env[62346]: DEBUG nova.compute.manager [req-8254ce6b-978e-47f3-b0ec-cd5da4a252a1 req-68f7731a-fc60-4ad2-b701-a08f585e1bc9 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Received event network-vif-plugged-5e0528c1-9d78-440e-9705-4b879060c95e {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1242.365678] env[62346]: DEBUG oslo_concurrency.lockutils [req-8254ce6b-978e-47f3-b0ec-cd5da4a252a1 req-68f7731a-fc60-4ad2-b701-a08f585e1bc9 service nova] Acquiring lock "88727b37-0f05-4551-ac87-e43385e0f76d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1242.365918] env[62346]: DEBUG oslo_concurrency.lockutils [req-8254ce6b-978e-47f3-b0ec-cd5da4a252a1 req-68f7731a-fc60-4ad2-b701-a08f585e1bc9 service nova] Lock "88727b37-0f05-4551-ac87-e43385e0f76d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1242.365918] env[62346]: DEBUG oslo_concurrency.lockutils [req-8254ce6b-978e-47f3-b0ec-cd5da4a252a1 req-68f7731a-fc60-4ad2-b701-a08f585e1bc9 service nova] Lock "88727b37-0f05-4551-ac87-e43385e0f76d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1242.366679] env[62346]: DEBUG nova.compute.manager [req-8254ce6b-978e-47f3-b0ec-cd5da4a252a1 req-68f7731a-fc60-4ad2-b701-a08f585e1bc9 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] No waiting events found dispatching network-vif-plugged-5e0528c1-9d78-440e-9705-4b879060c95e {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1242.366679] env[62346]: WARNING nova.compute.manager [req-8254ce6b-978e-47f3-b0ec-cd5da4a252a1 req-68f7731a-fc60-4ad2-b701-a08f585e1bc9 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Received unexpected event network-vif-plugged-5e0528c1-9d78-440e-9705-4b879060c95e for instance with vm_state building and task_state spawning.
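The "No waiting events found ... Received unexpected event" warning above is benign here: Neutron delivered network-vif-plugged before the driver registered a waiter for it. The dispatch pattern is essentially a table of expected events keyed by (instance, event name), with a threading primitive per entry; a toy version (class and method names are mine, not Nova's exact API):

    import threading

    class InstanceEvents:
        """Toy version of the waiter table behind pop_instance_event()."""
        def __init__(self):
            self._waiters = {}        # (instance_uuid, event_name) -> threading.Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            """Spawn path: register interest before triggering the external action."""
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            """Event path: wake the waiter, or log the 'unexpected event' case."""
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print(f"No waiting events found dispatching {event_name}")
                return None
            ev.set()
            return ev

If prepare() has not run yet when pop() fires, the event is simply dropped with the warning, exactly as logged.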
[ 1242.433163] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1242.902818] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Updating instance_info_cache with network_info: [{"id": "5e0528c1-9d78-440e-9705-4b879060c95e", "address": "fa:16:3e:0a:40:ca", "network": {"id": "8fd2ed7f-9f59-475e-acd6-38de7c00c978", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1185369202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f45e49e839f4cafaea598ac8f5fbd2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e0528c1-9d", "ovs_interfaceid": "5e0528c1-9d78-440e-9705-4b879060c95e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1242.931215] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Releasing lock "refresh_cache-88727b37-0f05-4551-ac87-e43385e0f76d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
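The network_info blob cached above is a list of VIF dicts; the fields Nova needs later (MAC, fixed IPs, OVS interface id, device name) are nested inside it. A short extraction example over exactly the structure in the log:

    def summarize_vifs(network_info):
        """Flatten a cached VIF list into (port_id, mac, [fixed ips], devname)."""
        out = []
        for vif in network_info:
            ips = [ip['address']
                   for subnet in vif['network']['subnets']
                   for ip in subnet['ips']]
            out.append((vif['id'], vif['address'], ips, vif['devname']))
        return out

    # Applied to the cache entry above this yields:
    # [('5e0528c1-9d78-440e-9705-4b879060c95e', 'fa:16:3e:0a:40:ca',
    #   ['192.168.128.11'], 'tap5e0528c1-9d')]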
"tap5e0528c1-9d", "ovs_interfaceid": "5e0528c1-9d78-440e-9705-4b879060c95e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1242.931886] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:40:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e0528c1-9d78-440e-9705-4b879060c95e', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1242.940494] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating folder: Project (5f45e49e839f4cafaea598ac8f5fbd2f). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1242.941186] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d1c4374-a54c-4c85-b800-ebbf5dfb479f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.954025] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Created folder: Project (5f45e49e839f4cafaea598ac8f5fbd2f) in parent group-v953204. [ 1242.954025] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating folder: Instances. Parent ref: group-v953283. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1242.954025] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54843161-924b-4e2e-84a4-c44098e7e9c7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.967132] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Created folder: Instances in parent group-v953283. [ 1242.968038] env[62346]: DEBUG oslo.service.loopingcall [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
[ 1242.968292] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1242.968517] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88b6a213-9023-4e48-9fc2-f591d7547d35 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1242.991640] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1242.991640] env[62346]: value = "task-4891708"
[ 1242.991640] env[62346]: _type = "Task"
[ 1242.991640] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1243.002485] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891708, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1243.503959] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891708, 'name': CreateVM_Task, 'duration_secs': 0.33769} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1243.504256] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1243.504844] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1243.505016] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1243.505338] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1243.505582] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8790525d-4f2f-4a22-bcff-a667e9fbee01 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1243.510558] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){
[ 1243.510558] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523771e2-daf0-4ca0-e993-7177114c2e39"
[ 1243.510558] env[62346]: _type = "Task"
[ 1243.510558] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
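The "Waiting for the task ... progress is 0% ... completed successfully" sequence above is a polling loop: the client re-reads vSphere task info on an interval until the task reaches a terminal state. A minimal generic stand-in (this is my sketch of the pattern, not oslo.vmware's actual API):

    import time

    def wait_for_task(read_task_info, interval=0.5, timeout=300):
        """Poll a vSphere-style task until it finishes.

        read_task_info() is assumed to return a dict with 'id', optional
        'progress', and 'state' in {'queued', 'running', 'success', 'error'}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = read_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print(f"Task {info['id']} progress is {info.get('progress', 0)}%.")
            time.sleep(interval)
        raise TimeoutError('task did not complete in time')

CreateVM_Task above finished in 0.33769s, so a single 0% progress line was logged before the success poll.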
[ 1243.526457] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1243.526723] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1243.526910] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1244.323887] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1244.324038] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1244.351994] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "9423ec7b-edb3-4cce-9aae-4c8076011284" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1244.352250] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "9423ec7b-edb3-4cce-9aae-4c8076011284" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1244.377536] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "21c12062-6eb2-4e25-b780-a3678b18d278" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.377767] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "21c12062-6eb2-4e25-b780-a3678b18d278" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.404553] env[62346]: DEBUG nova.compute.manager [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Received event network-changed-5e0528c1-9d78-440e-9705-4b879060c95e {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1244.404636] env[62346]: DEBUG nova.compute.manager [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Refreshing instance network info cache due to event network-changed-5e0528c1-9d78-440e-9705-4b879060c95e. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1244.404850] env[62346]: DEBUG oslo_concurrency.lockutils [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] Acquiring lock "refresh_cache-88727b37-0f05-4551-ac87-e43385e0f76d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.404996] env[62346]: DEBUG oslo_concurrency.lockutils [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] Acquired lock "refresh_cache-88727b37-0f05-4551-ac87-e43385e0f76d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.405213] env[62346]: DEBUG nova.network.neutron [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Refreshing network info cache for port 5e0528c1-9d78-440e-9705-4b879060c95e {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1244.752195] env[62346]: DEBUG nova.network.neutron [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Updated VIF entry in instance network info cache for port 5e0528c1-9d78-440e-9705-4b879060c95e. 
[ 1244.752563] env[62346]: DEBUG nova.network.neutron [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Updating instance_info_cache with network_info: [{"id": "5e0528c1-9d78-440e-9705-4b879060c95e", "address": "fa:16:3e:0a:40:ca", "network": {"id": "8fd2ed7f-9f59-475e-acd6-38de7c00c978", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1185369202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f45e49e839f4cafaea598ac8f5fbd2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e0528c1-9d", "ovs_interfaceid": "5e0528c1-9d78-440e-9705-4b879060c95e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1244.763541] env[62346]: DEBUG oslo_concurrency.lockutils [req-a4265027-0939-4344-9a14-b0193a33d57f req-086348d5-05f4-4c3b-b930-702d93f81c19 service nova] Releasing lock "refresh_cache-88727b37-0f05-4551-ac87-e43385e0f76d" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1247.290470] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "88727b37-0f05-4551-ac87-e43385e0f76d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1283.221607] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1283.234431] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1283.234963] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1283.234963] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.235079] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1283.239026] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4424a65d-ad6d-4ecf-8ed2-3f5f3451110c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.245886] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf5aa09-e9bf-42e3-a10f-251b9564d036 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.261795] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7175ce-2b90-4f21-9e6a-daec2d4421bb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.268804] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42f9657-4a00-4efc-be0b-532dfd8e6f4b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.299621] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180570MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1283.299771] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.299973] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.381184] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c72a59f9-220d-4da4-8daa-2724ab255190 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1283.381370] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
[ 1283.381496] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.381617] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.381739] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.381857] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.381974] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.382107] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.382227] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.382342] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1283.394640] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.407108] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.418255] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.428931] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3027e833-8cb3-4ace-bc05-f8370630e1ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.440223] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c64fd0a9-d455-448f-bb53-82999adccf14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.451359] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3c523404-52dd-4248-bc92-a2d67b03009e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.460280] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 00208615-17d0-4e20-b1e9-80819181109e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.471206] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8f0203a5-3dc5-4a2d-9a96-07bed465d1eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.481526] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.491195] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9423ec7b-edb3-4cce-9aae-4c8076011284 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.500432] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21c12062-6eb2-4e25-b780-a3678b18d278 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1283.501121] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1283.501121] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '59', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_fd9cc5c4d97b46b290004d72385eea3a': '1', 'io_workload': '10', 'num_proj_04179eaf0efd4e43a9d4eb1445ffc270': '1', 'num_proj_a625cf2514b140fcb029253fd85f6ee8': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_4bf15abf99ca484a9e823e9cf6595cd3': '1', 'num_proj_ed41fdcbed524645bc79ee368edf832b': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1283.756642] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c57ef7-f4b4-48e0-a51c-9f80c62f030e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1283.764747] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1e4a3a-129e-4ef1-98da-e701ca8ad570 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1283.795457] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24b2f9d-f63b-4ae0-aa22-3a89980eb825 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1283.803357] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10bc04b3-6355-4100-9511-8b51e7b69b97 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1283.816738] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1283.825473] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1283.843210] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
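The "Final resource view" above (used_ram=1792MB, used_disk=10GB, used_vcpus=10) is consistent with the reserved memory plus the sum of the ten per-instance placement allocations listed earlier; a quick arithmetic check (Nova actually derives usage from instance flavors rather than summing these records, so this is only a sanity check):

    reserved_mb = 512
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10   # ten m1.nano claims

    used_ram = reserved_mb + sum(a['MEMORY_MB'] for a in allocations)  # 1792 MB
    used_disk = sum(a['DISK_GB'] for a in allocations)                 # 10 GB
    used_vcpus = sum(a['VCPU'] for a in allocations)                   # 10
    print(used_ram, used_disk, used_vcpus)                             # 1792 10 10

The per-project counters in stats (num_proj_*) are likewise a count of instances grouped by project_id.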
[ 1283.843210] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.543s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1285.842619] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1286.023062] env[62346]: WARNING oslo_vmware.rw_handles [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles response.begin()
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1286.023062] env[62346]: ERROR oslo_vmware.rw_handles
[ 1286.023062] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
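The rw_handles warning above shows a write handle's close() failing because the server had already dropped the connection after the transfer; the very next line confirms the image download itself succeeded, so the lost final response is cosmetic. A defensive close in that spirit (my own sketch, not the oslo.vmware rw_handles code):

    import http.client

    def close_quietly(conn):
        """Finish a transfer over http.client, tolerating a server that
        hangs up before sending the final response (as in the log above)."""
        try:
            resp = conn.getresponse()
            resp.read()                       # drain the final response if any
        except http.client.RemoteDisconnected:
            pass                              # transfer already finished; benign
        finally:
            conn.close()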
[ 1286.025207] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1286.025460] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Copying Virtual Disk [datastore2] vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/b5ac5488-dde7-4f70-94b9-4d7c71f2b214/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1286.025773] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09bc6928-644b-40db-9293-b10e43e732a7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1286.035075] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Waiting for the task: (returnval){
[ 1286.035075] env[62346]: value = "task-4891709"
[ 1286.035075] env[62346]: _type = "Task"
[ 1286.035075] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1286.043492] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Task: {'id': task-4891709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1286.215548] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1286.220063] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1286.220226] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 1286.220306] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 1286.239855] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240028] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240167] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240305] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240434] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240559] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240678] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240798] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.240917] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.241042] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1286.241193] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}}
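The heal task above skips every instance whose vm_state is still building, because their network info is being assembled by the spawn path at that moment; with all ten instances building, nothing is left to heal. The filtering is roughly (my sketch, not Nova's exact code):

    def instances_to_heal(instances):
        """Mimic _heal_instance_info_cache's filtering of the instance list."""
        for inst in instances:
            if inst['vm_state'] == 'building':
                print(f"[instance: {inst['uuid']}] Skipping network cache update "
                      "for instance because it is Building.")
                continue
            yield inst

    # list(instances_to_heal([{'uuid': 'c72a59f9-...', 'vm_state': 'building'}]))
    # prints the skip line and returns [] -- hence "Didn't find any instances".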
[ 1286.545896] env[62346]: DEBUG oslo_vmware.exceptions [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1286.546353] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1286.547295] env[62346]: ERROR nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1286.547295] env[62346]: Faults: ['InvalidArgument']
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Traceback (most recent call last):
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] yield resources
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self.driver.spawn(context, instance, image_meta,
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self._fetch_image_if_missing(context, vi)
[ 1286.547295] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] image_cache(vi, tmp_image_ds_loc)
[ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] vm_util.copy_virtual_disk(
[ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] session._wait_for_task(vmdk_copy_task)
[ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
line 157, in _wait_for_task [ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] return self.wait_for_task(task_ref) [ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] return evt.wait() [ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] result = hub.switch() [ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1286.547777] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] return self.greenlet.switch() [ 1286.548159] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1286.548159] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self.f(*self.args, **self.kw) [ 1286.548159] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1286.548159] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] raise exceptions.translate_fault(task_info.error) [ 1286.548159] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1286.548159] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Faults: ['InvalidArgument'] [ 1286.548159] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] [ 1286.548159] env[62346]: INFO nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Terminating instance [ 1286.550041] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.550238] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1286.550957] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: 
c72a59f9-220d-4da4-8daa-2724ab255190] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1286.551167] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1286.551395] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edd2920a-1d87-486a-bbc2-fef9377e1193 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.553834] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5822ad30-28f6-4af5-aa2a-93db541dcc03 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.562325] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1286.562550] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0378ecd-cfcf-4283-b75c-9bcd12badc3b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.564924] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1286.565105] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1286.566112] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aba92ea2-6120-441f-bfeb-6ba7bdaad82d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.570868] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Waiting for the task: (returnval){ [ 1286.570868] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]522f6d81-0ee8-1909-2151-a07bede8ee35" [ 1286.570868] env[62346]: _type = "Task" [ 1286.570868] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.587228] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1286.587472] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Creating directory with path [datastore2] vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1286.587710] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2e6d456-b383-4fec-b7ed-ca678aa1b9ff {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.607941] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Created directory with path [datastore2] vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1286.608161] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Fetch image to [datastore2] vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1286.608341] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1286.609176] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ab30a1-638f-4aeb-b3e3-919affe8761c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.617018] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce13d70e-6a4c-4f5d-8f0b-ed7377cf9cf3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.626883] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bd00d7-f14a-46a2-b06e-0580b5542c95 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.631845] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 
tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1286.632094] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1286.632252] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Deleting the datastore file [datastore2] c72a59f9-220d-4da4-8daa-2724ab255190 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1286.632904] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d4109de-b14f-4c6b-9484-77f57501c478 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.663467] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843f59ae-1b9c-407a-b0cf-42d21a692b1a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.666446] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Waiting for the task: (returnval){ [ 1286.666446] env[62346]: value = "task-4891711" [ 1286.666446] env[62346]: _type = "Task" [ 1286.666446] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.672130] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ff998802-1338-42d3-ba4a-af2ffa64675c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.676854] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Task: {'id': task-4891711, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.699328] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1286.754564] env[62346]: DEBUG oslo_vmware.rw_handles [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1286.814015] env[62346]: DEBUG oslo_vmware.rw_handles [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1286.814335] env[62346]: DEBUG oslo_vmware.rw_handles [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1287.176840] env[62346]: DEBUG oslo_vmware.api [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Task: {'id': task-4891711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068406} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.177253] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1287.177507] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1287.177770] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1287.178030] env[62346]: INFO nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Took 0.63 seconds to destroy the instance on the hypervisor. 
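[editor's note] The records above show the recurring task lifecycle in this log: a vCenter task is submitted (here DeleteDatastoreFile_Task), polled ("progress is 0%"), and either completes or, as in the CopyVirtualDisk traceback earlier, surfaces a fault that is translated into a local exception (VimFaultException: "A specified parameter was not correct: fileType") which aborts the spawn. A minimal, self-contained sketch of that poll-and-translate pattern; this is an illustration, not oslo.vmware's actual implementation, and the TaskInfo class, states, and poll interval are assumptions for the demo:

```python
import time

class TaskFault(Exception):
    """Stand-in for a translated task fault (cf. VimFaultException)."""

class TaskInfo:
    """Hypothetical snapshot of a remote task: state is one of
    'running', 'success', or 'error'."""
    def __init__(self, state, progress=0, error=None):
        self.state, self.progress, self.error = state, progress, error

def wait_for_task(poll_task_info, interval=0.5, timeout=60.0):
    """Poll a remote task until it finishes, raising locally when the
    remote side reports an error, as in the log above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # e.g. "A specified parameter was not correct: fileType"
            raise TaskFault(info.error)
        time.sleep(interval)  # task still running; poll again
    raise TimeoutError("task did not complete in time")

# Simulated task that fails the way the CopyVirtualDisk task did:
states = iter([TaskInfo("running", 0),
               TaskInfo("error",
                        error="A specified parameter was not correct: "
                              "fileType")])
try:
    wait_for_task(lambda: next(states), interval=0.01)
except TaskFault as exc:
    print("spawn aborted:", exc)
```

The real driver layers this behind an event loop (the evt.wait()/hub.switch() frames in the traceback), but the control flow is the same: the poller, not the submitter, decides when a remote error becomes a raised exception.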
[ 1287.180407] env[62346]: DEBUG nova.compute.claims [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1287.180623] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.180911] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.220158] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.503973] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe855f9-3c4a-4512-983f-1e588a0e11b2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.513327] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdad5822-50a6-46b8-a10f-1c0dbfeae33d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.542950] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca9eb0a-dd74-4f95-8b62-bd1b0da76b52 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.550743] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1641d3-19f8-47f5-9e00-f62ceace3703 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.564587] env[62346]: DEBUG nova.compute.provider_tree [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.573374] env[62346]: DEBUG nova.scheduler.client.report [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1287.587397] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.406s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.587951] env[62346]: ERROR nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1287.587951] env[62346]: Faults: ['InvalidArgument'] [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Traceback (most recent call last): [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self.driver.spawn(context, instance, image_meta, [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self._fetch_image_if_missing(context, vi) [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] image_cache(vi, tmp_image_ds_loc) [ 1287.587951] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] vm_util.copy_virtual_disk( [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] session._wait_for_task(vmdk_copy_task) [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] return self.wait_for_task(task_ref) [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] return evt.wait() [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] result = hub.switch() [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] return self.greenlet.switch() [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1287.588290] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] self.f(*self.args, **self.kw) [ 1287.588623] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1287.588623] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] raise exceptions.translate_fault(task_info.error) [ 1287.588623] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1287.588623] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Faults: ['InvalidArgument'] [ 1287.588623] env[62346]: ERROR nova.compute.manager [instance: c72a59f9-220d-4da4-8daa-2724ab255190] [ 1287.588760] env[62346]: DEBUG nova.compute.utils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1287.590336] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Build of instance c72a59f9-220d-4da4-8daa-2724ab255190 was re-scheduled: A specified parameter was not correct: fileType [ 1287.590336] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1287.590836] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1287.591108] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1287.591447] env[62346]: DEBUG nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1287.591754] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1287.965972] env[62346]: DEBUG nova.network.neutron [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.985061] env[62346]: INFO nova.compute.manager [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Took 0.39 seconds to deallocate network for instance. [ 1288.087736] env[62346]: INFO nova.scheduler.client.report [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Deleted allocations for instance c72a59f9-220d-4da4-8daa-2724ab255190 [ 1288.111034] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6877b776-8f77-412f-8f4d-b74e907ec6bb tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "c72a59f9-220d-4da4-8daa-2724ab255190" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.947s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.112213] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "c72a59f9-220d-4da4-8daa-2724ab255190" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 392.539s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.112439] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Acquiring lock "c72a59f9-220d-4da4-8daa-2724ab255190-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.112748] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "c72a59f9-220d-4da4-8daa-2724ab255190-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.112834] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "c72a59f9-220d-4da4-8daa-2724ab255190-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.114711] env[62346]: INFO nova.compute.manager [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Terminating instance [ 1288.116777] env[62346]: DEBUG nova.compute.manager [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1288.116974] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1288.117294] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4acab942-6a8d-4786-bd4d-c15739d7061d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.123530] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1288.130338] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94f7a45-afe2-4612-ac36-8872aab34d3d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.165741] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c72a59f9-220d-4da4-8daa-2724ab255190 could not be found. [ 1288.165953] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1288.166187] env[62346]: INFO nova.compute.manager [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Took 0.05 seconds to destroy the instance on the hypervisor. 
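[editor's note] Worth flagging in the records above: the second terminate at [ 1288.165741] finds the VM already gone, logs InstanceNotFound as a WARNING, and still reports "Instance destroyed" so the delete path completes. A short sketch of that idempotent-destroy convention, assuming only that the backend lookup raises a dedicated not-found error; the class and method names here are illustrative, not Nova's:

```python
class InstanceNotFound(Exception):
    """Raised when the backend has no VM for the given instance."""

def destroy(instance_id, backend):
    """Destroy an instance, treating 'already gone' as success so
    repeated terminate requests stay idempotent."""
    try:
        vm_ref = backend.lookup(instance_id)
    except InstanceNotFound:
        # Matches the WARNING above: nothing left to unregister or
        # delete, so the destroy is considered complete.
        print(f"Instance {instance_id} does not exist on backend; "
              "treating as destroyed")
        return
    backend.unregister(vm_ref)
    backend.delete_files(vm_ref)

class GoneBackend:
    """Fake backend whose VM has already been cleaned up."""
    def lookup(self, instance_id):
        raise InstanceNotFound(instance_id)

destroy("c72a59f9-220d-4da4-8daa-2724ab255190", GoneBackend())
```

Swallowing not-found here is deliberate: the first destroy (after the rescheduled build) already removed the VM and its datastore files, so the user-initiated terminate must not fail on the second pass.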
[ 1288.166436] env[62346]: DEBUG oslo.service.loopingcall [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1288.169055] env[62346]: DEBUG nova.compute.manager [-] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1288.169159] env[62346]: DEBUG nova.network.neutron [-] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1288.183655] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.184018] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.185991] env[62346]: INFO nova.compute.claims [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1288.220583] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1288.223267] env[62346]: DEBUG nova.network.neutron [-] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.242355] env[62346]: INFO nova.compute.manager [-] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] Took 0.07 seconds to deallocate network for instance. 
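[editor's note] The claim at [ 1288.185991] succeeds against the provider inventory logged earlier, where usable capacity per resource class follows from the logged fields as (total - reserved) * allocation_ratio. A small sketch of that arithmetic over the inventory data copied from the log; the fits() helper, the usage figures, and the claim amounts are made-up examples, and real placement also enforces the min_unit/max_unit/step_size fields that this simplification ignores:

```python
# Inventory as logged for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(inv):
    """Effective schedulable capacity: (total - reserved) * ratio."""
    return {rc: (f["total"] - f["reserved"]) * f["allocation_ratio"]
            for rc, f in inv.items()}

def fits(claim, inv, used):
    """Would this claim fit on top of current usage?"""
    cap = capacity(inv)
    return all(used.get(rc, 0) + amount <= cap[rc]
               for rc, amount in claim.items())

# A 1-vCPU / 128 MB / 1 GB claim (the m1.nano flavor seen later in
# this log) against hypothetical current usage:
print(capacity(inventory))   # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, ...}
print(fits({"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1},
           inventory,
           {"VCPU": 10, "MEMORY_MB": 4096, "DISK_GB": 20}))  # True
```

This is also why the claim is taken under the "compute_resources" lock visible in the surrounding records: the check and the usage update must be atomic per host.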
[ 1288.347942] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c43e9d11-441a-4085-a753-881cbd1179da tempest-ServerRescueTestJSON-669854220 tempest-ServerRescueTestJSON-669854220-project-member] Lock "c72a59f9-220d-4da4-8daa-2724ab255190" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.236s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.348892] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "c72a59f9-220d-4da4-8daa-2724ab255190" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 156.671s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.349095] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c72a59f9-220d-4da4-8daa-2724ab255190] During sync_power_state the instance has a pending task (deleting). Skip. [ 1288.349280] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "c72a59f9-220d-4da4-8daa-2724ab255190" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.524341] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d88f787-9ee1-48c0-b532-7a103a6fff56 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.532990] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dd961f-9db5-46c4-8581-81a0923402b8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.567176] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014abd81-6cce-49b1-907e-67197a79125a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.575486] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00228854-3635-40ab-bd72-b928df8dfd46 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.590219] env[62346]: DEBUG nova.compute.provider_tree [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.598673] env[62346]: DEBUG nova.scheduler.client.report [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 
'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1288.613813] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.429s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.613813] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1288.647995] env[62346]: DEBUG nova.compute.utils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1288.649503] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1288.649688] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1288.658177] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1288.705641] env[62346]: DEBUG nova.policy [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9eb444699bfe4137a12b88f71543f185', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20adb521b1574b8581a0c368923e38eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1288.727299] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1288.757313] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1288.757571] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1288.757966] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1288.757966] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1288.758113] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1288.758265] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1288.758507] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1288.758631] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1288.758801] env[62346]: DEBUG 
nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1288.758967] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1288.759161] env[62346]: DEBUG nova.virt.hardware [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1288.760239] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cb6a69-153e-4c05-ba11-6643a709d24a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.769405] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3fc090-83a1-41d3-aca7-de91b4fe36c0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.124923] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Successfully created port: a015985a-1b77-494a-bbb4-dd57e0c71888 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1289.220652] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.220652] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1289.936812] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Successfully updated port: a015985a-1b77-494a-bbb4-dd57e0c71888 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1289.952056] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "refresh_cache-732fbaa9-beef-488f-9bf6-095ffa1fc1c4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.952230] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "refresh_cache-732fbaa9-beef-488f-9bf6-095ffa1fc1c4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.952409] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1290.000677] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1290.017727] env[62346]: DEBUG nova.compute.manager [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Received event network-vif-plugged-a015985a-1b77-494a-bbb4-dd57e0c71888 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1290.017998] env[62346]: DEBUG oslo_concurrency.lockutils [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] Acquiring lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1290.018225] env[62346]: DEBUG oslo_concurrency.lockutils [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] Lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1290.018396] env[62346]: DEBUG oslo_concurrency.lockutils [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] Lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1290.018591] env[62346]: DEBUG nova.compute.manager [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] No waiting events found dispatching network-vif-plugged-a015985a-1b77-494a-bbb4-dd57e0c71888 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1290.018837] env[62346]: WARNING nova.compute.manager [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Received unexpected event network-vif-plugged-a015985a-1b77-494a-bbb4-dd57e0c71888 for instance with vm_state building and task_state spawning.
[ 1290.019204] env[62346]: DEBUG nova.compute.manager [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Received event network-changed-a015985a-1b77-494a-bbb4-dd57e0c71888 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1290.019424] env[62346]: DEBUG nova.compute.manager [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Refreshing instance network info cache due to event network-changed-a015985a-1b77-494a-bbb4-dd57e0c71888. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}}
[ 1290.019625] env[62346]: DEBUG oslo_concurrency.lockutils [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] Acquiring lock "refresh_cache-732fbaa9-beef-488f-9bf6-095ffa1fc1c4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1290.182251] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Updating instance_info_cache with network_info: [{"id": "a015985a-1b77-494a-bbb4-dd57e0c71888", "address": "fa:16:3e:3b:6f:f1", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa015985a-1b", "ovs_interfaceid": "a015985a-1b77-494a-bbb4-dd57e0c71888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1290.198420] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "refresh_cache-732fbaa9-beef-488f-9bf6-095ffa1fc1c4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1290.198526] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Instance network_info: |[{"id": "a015985a-1b77-494a-bbb4-dd57e0c71888", "address": "fa:16:3e:3b:6f:f1", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa015985a-1b", "ovs_interfaceid": "a015985a-1b77-494a-bbb4-dd57e0c71888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1290.198834] env[62346]: DEBUG oslo_concurrency.lockutils [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] Acquired lock "refresh_cache-732fbaa9-beef-488f-9bf6-095ffa1fc1c4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1290.199043] env[62346]: DEBUG nova.network.neutron [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Refreshing network info cache for port a015985a-1b77-494a-bbb4-dd57e0c71888 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1290.200198] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:6f:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a015985a-1b77-494a-bbb4-dd57e0c71888', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1290.207662] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating folder: Project (20adb521b1574b8581a0c368923e38eb). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1290.208442] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-611a6aeb-fcb6-462b-94b0-a448f13e2823 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1290.219986] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1290.221973] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created folder: Project (20adb521b1574b8581a0c368923e38eb) in parent group-v953204.
[ 1290.222288] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating folder: Instances. Parent ref: group-v953286. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1290.222408] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-284eeb5f-bb96-4b24-9cfe-584697076692 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1290.231816] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created folder: Instances in parent group-v953286.
[ 1290.232084] env[62346]: DEBUG oslo.service.loopingcall [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1290.232277] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1290.232484] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c55b68c-de81-41c1-8706-17991789c4c8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1290.255844] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1290.255844] env[62346]: value = "task-4891714"
[ 1290.255844] env[62346]: _type = "Task"
[ 1290.255844] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1290.264585] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891714, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1290.768849] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891714, 'name': CreateVM_Task, 'duration_secs': 0.306995} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1290.769585] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1290.770633] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1290.770922] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1290.771392] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1290.771811] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4c71d32-3d84-4ba2-b799-aa652074f6aa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1290.777463] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){
[ 1290.777463] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5294e3f9-a51c-a4e7-8407-7973ded11415"
[ 1290.777463] env[62346]: _type = "Task"
[ 1290.777463] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1290.787912] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5294e3f9-a51c-a4e7-8407-7973ded11415, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1290.788351] env[62346]: DEBUG nova.network.neutron [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Updated VIF entry in instance network info cache for port a015985a-1b77-494a-bbb4-dd57e0c71888. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1290.788677] env[62346]: DEBUG nova.network.neutron [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Updating instance_info_cache with network_info: [{"id": "a015985a-1b77-494a-bbb4-dd57e0c71888", "address": "fa:16:3e:3b:6f:f1", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa015985a-1b", "ovs_interfaceid": "a015985a-1b77-494a-bbb4-dd57e0c71888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1290.799566] env[62346]: DEBUG oslo_concurrency.lockutils [req-e605476d-e7da-4fcb-ad46-7abc10807d16 req-47e4e323-eeec-4484-808d-0344910a59e9 service nova] Releasing lock "refresh_cache-732fbaa9-beef-488f-9bf6-095ffa1fc1c4" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1291.289027] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1291.290045] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1291.290151] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1292.220401] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1294.984573] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1294.984573] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1295.344056] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1332.463676] env[62346]: WARNING oslo_vmware.rw_handles [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1332.463676] env[62346]: ERROR oslo_vmware.rw_handles
[ 1332.463676] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1332.465997] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1332.466332] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Copying Virtual Disk [datastore2] vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/477fb2a2-8f76-4576-b09d-4adf3a637269/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1332.466659] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c72a3b3f-da7c-4722-85a0-81a100c61038 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1332.475783] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Waiting for the task: (returnval){
[ 1332.475783] env[62346]: value = "task-4891715"
[ 1332.475783] env[62346]: _type = "Task"
[ 1332.475783] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1332.484435] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Task: {'id': task-4891715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1332.991937] env[62346]: DEBUG oslo_vmware.exceptions [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1332.992319] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1332.992973] env[62346]: ERROR nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1332.992973] env[62346]: Faults: ['InvalidArgument']
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Traceback (most recent call last):
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     yield resources
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self.driver.spawn(context, instance, image_meta,
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self._fetch_image_if_missing(context, vi)
[ 1332.992973] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     image_cache(vi, tmp_image_ds_loc)
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     vm_util.copy_virtual_disk(
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     session._wait_for_task(vmdk_copy_task)
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     return self.wait_for_task(task_ref)
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     return evt.wait()
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     result = hub.switch()
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1332.993337] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     return self.greenlet.switch()
[ 1332.993690] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1332.993690] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self.f(*self.args, **self.kw)
[ 1332.993690] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1332.993690] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     raise exceptions.translate_fault(task_info.error)
[ 1332.993690] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1332.993690] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Faults: ['InvalidArgument']
[ 1332.993690] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]
[ 1332.993690] env[62346]: INFO nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Terminating instance
[ 1332.995690] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1332.995690] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1332.995690] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17ae8b40-c9d5-4a7a-bd90-2fccee8f7d2f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1332.998328] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1332.998558] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1332.999423] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3abff8-f8ab-4b8f-abec-cc783516ede8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.007359] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1333.007607] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00f8741a-bcbd-4405-a98d-b81b98cc07bf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.010145] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1333.010326] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1333.011479] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23415283-2710-4abe-87ca-9167a2642e49 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.017805] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Waiting for the task: (returnval){
[ 1333.017805] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5215227a-c016-cb23-e9fa-89c10f5e42a6"
[ 1333.017805] env[62346]: _type = "Task"
[ 1333.017805] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1333.027347] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5215227a-c016-cb23-e9fa-89c10f5e42a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1333.083519] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1333.083818] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1333.084100] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Deleting the datastore file [datastore2] af33f439-7ebe-478a-83ee-f7fc8e7b630d {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1333.084314] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f950eef0-ed2a-43f1-bc18-2e19f4b4cd94 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.092076] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Waiting for the task: (returnval){
[ 1333.092076] env[62346]: value = "task-4891717"
[ 1333.092076] env[62346]: _type = "Task"
[ 1333.092076] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1333.100401] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Task: {'id': task-4891717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1333.529675] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1333.530790] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Creating directory with path [datastore2] vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1333.530790] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7e55848-1ea7-46e8-8419-52dfbaa98358 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.542911] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Created directory with path [datastore2] vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1333.543436] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Fetch image to [datastore2] vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1333.543613] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1333.544519] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b2fc49-b434-4d38-8ab6-5f1bdf4faf6c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.552830] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd9619d-53ac-448c-96df-ac23be0083ab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.562982] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168e8b86-48cb-4619-b0fe-40f093f1090f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.602642] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1f488a-17ab-4536-b547-f755803dd2d1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.612959] env[62346]: DEBUG oslo_vmware.api [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Task: {'id': task-4891717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078043} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1333.615074] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1333.615336] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1333.615578] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1333.615819] env[62346]: INFO nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Took 0.62 seconds to destroy the instance on the hypervisor.
[ 1333.618585] env[62346]: DEBUG nova.compute.claims [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1333.618862] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1333.619250] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1333.623697] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-47fc2ebf-07b7-4388-b89f-6f7698b6b04b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.653865] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1333.708883] env[62346]: DEBUG oslo_vmware.rw_handles [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1333.771984] env[62346]: DEBUG oslo_vmware.rw_handles [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1333.772244] env[62346]: DEBUG oslo_vmware.rw_handles [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1333.990333] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f430ce-77ad-4353-bdbc-60a31ad0479a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.999057] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb55441-8c85-4372-a675-2748e8638d23 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1334.030520] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21723468-736a-4baf-af65-19878281055a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1334.038829] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3e5619-6862-471c-8ec3-6f52bc082c25 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1334.053638] env[62346]: DEBUG nova.compute.provider_tree [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1334.063146] env[62346]: DEBUG nova.scheduler.client.report [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1334.084950] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.466s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1334.085570] env[62346]: ERROR nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1334.085570] env[62346]: Faults: ['InvalidArgument']
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Traceback (most recent call last):
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self.driver.spawn(context, instance, image_meta,
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self._fetch_image_if_missing(context, vi)
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     image_cache(vi, tmp_image_ds_loc)
[ 1334.085570] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     vm_util.copy_virtual_disk(
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     session._wait_for_task(vmdk_copy_task)
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     return self.wait_for_task(task_ref)
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     return evt.wait()
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     result = hub.switch()
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     return self.greenlet.switch()
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1334.085910] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     self.f(*self.args, **self.kw)
[ 1334.086269] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1334.086269] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]     raise exceptions.translate_fault(task_info.error)
[ 1334.086269] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1334.086269] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Faults: ['InvalidArgument']
[ 1334.086269] env[62346]: ERROR nova.compute.manager [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d]
[ 1334.086405] env[62346]: DEBUG nova.compute.utils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1334.087832] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Build of instance af33f439-7ebe-478a-83ee-f7fc8e7b630d was re-scheduled: A specified parameter was not correct: fileType
[ 1334.087832] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1334.088222] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1334.088396] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1334.088565] env[62346]: DEBUG nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1334.088727] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1334.429507] env[62346]: DEBUG nova.network.neutron [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1334.447336] env[62346]: INFO nova.compute.manager [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Took 0.36 seconds to deallocate network for instance.
[ 1334.568700] env[62346]: INFO nova.scheduler.client.report [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Deleted allocations for instance af33f439-7ebe-478a-83ee-f7fc8e7b630d
[ 1334.597644] env[62346]: DEBUG oslo_concurrency.lockutils [None req-88bf243b-8865-41bb-bfd0-76420792a968 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 580.865s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1334.598712] env[62346]: DEBUG oslo_concurrency.lockutils [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 384.773s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1334.599244] env[62346]: DEBUG oslo_concurrency.lockutils [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Acquiring lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1334.599244] env[62346]: DEBUG oslo_concurrency.lockutils [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1334.599402] env[62346]: DEBUG oslo_concurrency.lockutils [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1334.603667] env[62346]: INFO nova.compute.manager [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Terminating instance
[ 1334.605674] env[62346]: DEBUG nova.compute.manager [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1334.605885] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1334.606408] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9404bbb9-f108-4b35-abd6-86503bf77c2f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1334.617173] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997c57a0-627b-457f-9d62-adc2154138f8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1334.629734] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1334.657486] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance af33f439-7ebe-478a-83ee-f7fc8e7b630d could not be found.
[ 1334.657602] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1334.659067] env[62346]: INFO nova.compute.manager [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1334.659067] env[62346]: DEBUG oslo.service.loopingcall [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1334.659067] env[62346]: DEBUG nova.compute.manager [-] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1334.659067] env[62346]: DEBUG nova.network.neutron [-] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1334.683394] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1334.683705] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1334.685403] env[62346]: INFO nova.compute.claims [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1334.690394] env[62346]: DEBUG nova.network.neutron [-] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1334.741417] env[62346]: INFO nova.compute.manager [-] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] Took 0.08 seconds to deallocate network for instance.
[ 1334.880826] env[62346]: DEBUG oslo_concurrency.lockutils [None req-91718f91-b09f-4398-bece-74a5b13cf896 tempest-ServerAddressesTestJSON-1511980039 tempest-ServerAddressesTestJSON-1511980039-project-member] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.282s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.881751] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 203.203s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.882891] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: af33f439-7ebe-478a-83ee-f7fc8e7b630d] During sync_power_state the instance has a pending task (deleting). Skip. [ 1334.882891] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "af33f439-7ebe-478a-83ee-f7fc8e7b630d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.056625] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d072e50f-eb54-4bdf-b6ba-013a8beb3efa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.064939] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8187e6-6990-46c6-9bec-4266f1be33f3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.095818] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3503bad-95b1-4652-8ee6-ba58a4d5225c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.104134] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f957a5-4658-490c-9a7b-961e162d2559 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.118275] env[62346]: DEBUG nova.compute.provider_tree [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1335.128532] env[62346]: DEBUG nova.scheduler.client.report [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0,
'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1335.143021] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.459s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.143580] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1335.187514] env[62346]: DEBUG nova.compute.utils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1335.189025] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1335.189025] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1335.199467] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1335.256126] env[62346]: DEBUG nova.policy [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcb28b02058243b5ba5328b6d5f69f67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f059d4d596ee4d2abf7190a5806dd848', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1335.271979] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1335.298703] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=<?>,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T11:25:33Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1335.298967] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1335.299143] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1335.299337] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1335.299485] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1335.299633] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1335.299852] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1335.300028] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1335.300208] env[62346]: DEBUG nova.virt.hardware [None
req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1335.300371] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1335.300546] env[62346]: DEBUG nova.virt.hardware [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1335.301463] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfadd3d7-b297-4ee4-b9a3-477dfb11c902 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.310925] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7de785-8e8e-4c63-9599-59994611fa37 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.637645] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Successfully created port: f94cc63a-38b2-45e0-a4ff-8783b7a2c358 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1336.172537] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Successfully created port: ad92fe19-26cf-4a4d-a1d1-12cb8503ad20 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1336.839478] env[62346]: DEBUG nova.compute.manager [req-c45645b2-9e29-4c1c-b522-3bcab04f9204 req-6d3db4a8-1e41-4acd-8d48-dd4078d32a37 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Received event network-vif-plugged-f94cc63a-38b2-45e0-a4ff-8783b7a2c358 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1336.839779] env[62346]: DEBUG oslo_concurrency.lockutils [req-c45645b2-9e29-4c1c-b522-3bcab04f9204 req-6d3db4a8-1e41-4acd-8d48-dd4078d32a37 service nova] Acquiring lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.839779] env[62346]: DEBUG oslo_concurrency.lockutils [req-c45645b2-9e29-4c1c-b522-3bcab04f9204 req-6d3db4a8-1e41-4acd-8d48-dd4078d32a37 service nova] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.840456] env[62346]: DEBUG oslo_concurrency.lockutils [req-c45645b2-9e29-4c1c-b522-3bcab04f9204
req-6d3db4a8-1e41-4acd-8d48-dd4078d32a37 service nova] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.840456] env[62346]: DEBUG nova.compute.manager [req-c45645b2-9e29-4c1c-b522-3bcab04f9204 req-6d3db4a8-1e41-4acd-8d48-dd4078d32a37 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] No waiting events found dispatching network-vif-plugged-f94cc63a-38b2-45e0-a4ff-8783b7a2c358 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1336.840456] env[62346]: WARNING nova.compute.manager [req-c45645b2-9e29-4c1c-b522-3bcab04f9204 req-6d3db4a8-1e41-4acd-8d48-dd4078d32a37 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Received unexpected event network-vif-plugged-f94cc63a-38b2-45e0-a4ff-8783b7a2c358 for instance with vm_state building and task_state spawning. [ 1336.968224] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Successfully updated port: f94cc63a-38b2-45e0-a4ff-8783b7a2c358 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1338.070873] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Successfully updated port: ad92fe19-26cf-4a4d-a1d1-12cb8503ad20 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1338.083351] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1338.083521] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquired lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.083681] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1338.157713] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Instance cache missing network info.
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1338.659790] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Updating instance_info_cache with network_info: [{"id": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "address": "fa:16:3e:13:97:6b", "network": {"id": "eadc9cdb-99aa-496b-8b41-c35ea641eb05", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1635999371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf94cc63a-38", "ovs_interfaceid": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "address": "fa:16:3e:c5:cb:b2", "network": {"id": "778891de-a4e8-4468-b534-d9899dabfe5c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1124436915", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad92fe19-26", "ovs_interfaceid": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.678830] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Releasing lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.679218] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Instance network_info: |[{"id": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "address": "fa:16:3e:13:97:6b", "network": 
{"id": "eadc9cdb-99aa-496b-8b41-c35ea641eb05", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1635999371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf94cc63a-38", "ovs_interfaceid": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "address": "fa:16:3e:c5:cb:b2", "network": {"id": "778891de-a4e8-4468-b534-d9899dabfe5c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1124436915", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad92fe19-26", "ovs_interfaceid": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1338.681894] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:97:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496ac502-bfc4-4324-8332-cac473eb7cc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f94cc63a-38b2-45e0-a4ff-8783b7a2c358', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:cb:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '132fdc50-e144-4a9b-8d77-6378eec02d9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad92fe19-26cf-4a4d-a1d1-12cb8503ad20', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1338.690743] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Creating folder: Project (f059d4d596ee4d2abf7190a5806dd848). 
Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1338.691852] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9517f5f6-be06-4726-a17b-9f9ff65ea2d1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.705063] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Created folder: Project (f059d4d596ee4d2abf7190a5806dd848) in parent group-v953204. [ 1338.705063] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Creating folder: Instances. Parent ref: group-v953289. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1338.705063] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22b47289-1d86-4a59-9ff8-f792bf7502b9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.714251] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Created folder: Instances in parent group-v953289. [ 1338.715867] env[62346]: DEBUG oslo.service.loopingcall [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1338.715867] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1338.715867] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0645dc24-444c-4479-b2a9-897e0b2fd218 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.739039] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1338.739039] env[62346]: value = "task-4891720" [ 1338.739039] env[62346]: _type = "Task" [ 1338.739039] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.748681] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891720, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.887698] env[62346]: DEBUG nova.compute.manager [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Received event network-changed-f94cc63a-38b2-45e0-a4ff-8783b7a2c358 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1338.887931] env[62346]: DEBUG nova.compute.manager [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Refreshing instance network info cache due to event network-changed-f94cc63a-38b2-45e0-a4ff-8783b7a2c358. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1338.889140] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Acquiring lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1338.889405] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Acquired lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.890678] env[62346]: DEBUG nova.network.neutron [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Refreshing network info cache for port f94cc63a-38b2-45e0-a4ff-8783b7a2c358 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1339.252297] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891720, 'name': CreateVM_Task, 'duration_secs': 0.361767} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.252742] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1339.253635] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.254046] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.254443] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1339.254740] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ace9945-b2cd-4249-8f04-e94c06265a89 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.260983] env[62346]: DEBUG oslo_vmware.api [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Waiting for the task: (returnval){ [ 1339.260983] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ea7582-b8fe-6b4f-56e8-a857e79be9d0" [ 1339.260983] env[62346]: _type = "Task" [ 1339.260983] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.274348] env[62346]: DEBUG oslo_vmware.api [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ea7582-b8fe-6b4f-56e8-a857e79be9d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.395608] env[62346]: DEBUG nova.network.neutron [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Updated VIF entry in instance network info cache for port f94cc63a-38b2-45e0-a4ff-8783b7a2c358. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1339.396264] env[62346]: DEBUG nova.network.neutron [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Updating instance_info_cache with network_info: [{"id": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "address": "fa:16:3e:13:97:6b", "network": {"id": "eadc9cdb-99aa-496b-8b41-c35ea641eb05", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1635999371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf94cc63a-38", "ovs_interfaceid": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "address": "fa:16:3e:c5:cb:b2", "network": {"id": "778891de-a4e8-4468-b534-d9899dabfe5c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1124436915", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad92fe19-26", "ovs_interfaceid": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.408603] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Releasing lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.408603] env[62346]: DEBUG nova.compute.manager [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Received event network-vif-plugged-ad92fe19-26cf-4a4d-a1d1-12cb8503ad20 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1339.408603] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Acquiring lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.408603] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.408603] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.408603] env[62346]: DEBUG nova.compute.manager [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] No waiting events found dispatching network-vif-plugged-ad92fe19-26cf-4a4d-a1d1-12cb8503ad20 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1339.408603] env[62346]: WARNING nova.compute.manager [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Received unexpected event network-vif-plugged-ad92fe19-26cf-4a4d-a1d1-12cb8503ad20 for instance with vm_state building and task_state spawning. [ 1339.408603] env[62346]: DEBUG nova.compute.manager [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Received event network-changed-ad92fe19-26cf-4a4d-a1d1-12cb8503ad20 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1339.408603] env[62346]: DEBUG nova.compute.manager [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Refreshing instance network info cache due to event network-changed-ad92fe19-26cf-4a4d-a1d1-12cb8503ad20.
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1339.408603] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Acquiring lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.408603] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Acquired lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.408603] env[62346]: DEBUG nova.network.neutron [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Refreshing network info cache for port ad92fe19-26cf-4a4d-a1d1-12cb8503ad20 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1339.738665] env[62346]: DEBUG nova.network.neutron [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Updated VIF entry in instance network info cache for port ad92fe19-26cf-4a4d-a1d1-12cb8503ad20. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1339.738665] env[62346]: DEBUG nova.network.neutron [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Updating instance_info_cache with network_info: [{"id": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "address": "fa:16:3e:13:97:6b", "network": {"id": "eadc9cdb-99aa-496b-8b41-c35ea641eb05", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1635999371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf94cc63a-38", "ovs_interfaceid": "f94cc63a-38b2-45e0-a4ff-8783b7a2c358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "address": "fa:16:3e:c5:cb:b2", "network": {"id": "778891de-a4e8-4468-b534-d9899dabfe5c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1124436915", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f059d4d596ee4d2abf7190a5806dd848", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad92fe19-26", "ovs_interfaceid": "ad92fe19-26cf-4a4d-a1d1-12cb8503ad20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.754939] env[62346]: DEBUG oslo_concurrency.lockutils [req-767fa577-a8da-4407-ae54-d1e8ce7acf03 req-8ee5c137-a727-4bf9-9881-b41d901b44a6 service nova] Releasing lock "refresh_cache-21910ef4-a1af-4064-bf9e-350f78a938ae" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.778661] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.778661] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1339.778661] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.108860] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.108860] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.220217] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.234734] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.235009] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.235261] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.235424] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1345.236553] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6221d79-6989-45f9-8e62-ae9121ca6d4e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.252306] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae000f1f-3991-4fd7-9e71-ca1dba568c8c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.268180] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf37790a-226f-4789-a7c4-915c176b3b33 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.275976] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3149472-a739-4a81-801e-42ffbd7f7524 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.311025] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180567MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1345.311199] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.311407] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.418059] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 
6155a6c2-3d55-4fe6-bade-a97db98796a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.418194] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.418321] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.418444] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.418704] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.418845] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.418958] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.419086] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.419207] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.419342] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1345.433692] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.457966] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3027e833-8cb3-4ace-bc05-f8370630e1ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.471182] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c64fd0a9-d455-448f-bb53-82999adccf14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.489278] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 3c523404-52dd-4248-bc92-a2d67b03009e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.503719] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 00208615-17d0-4e20-b1e9-80819181109e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.515447] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8f0203a5-3dc5-4a2d-9a96-07bed465d1eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.527931] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.539523] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9423ec7b-edb3-4cce-9aae-4c8076011284 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.556118] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21c12062-6eb2-4e25-b780-a3678b18d278 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.568128] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.579142] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1345.579398] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1345.579581] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '61', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_a625cf2514b140fcb029253fd85f6ee8': '1', 'io_workload': '10', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_4bf15abf99ca484a9e823e9cf6595cd3': '1', 'num_proj_ed41fdcbed524645bc79ee368edf832b': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_task_spawning': '1', 'num_proj_f059d4d596ee4d2abf7190a5806dd848': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1345.965854] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c387675-3295-4749-9d9b-2f393baf7c8a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.976865] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7c53d3-2a18-4572-9a35-8ff572d9e9ed {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.012916] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed23caa2-bd86-488d-9cda-504b5fc696ef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.024464] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccc30eb-ac88-44a1-8315-02595afeaf59 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.043981] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.057532] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1346.076751] env[62346]: DEBUG 
nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1346.076958] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.766s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.077189] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.215435] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.219662] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.219817] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1347.219935] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1347.245710] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.245901] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.246017] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.248401] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.248573] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.248707] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.248832] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.255024] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.255024] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.255024] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1347.255024] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1349.220535] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.220063] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.220286] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.220351] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1352.216709] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.249786] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.220854] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.547333] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "21910ef4-a1af-4064-bf9e-350f78a938ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.684715] env[62346]: WARNING oslo_vmware.rw_handles [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1380.684715] env[62346]: ERROR oslo_vmware.rw_handles [ 1380.685319] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1380.687138] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None 
req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1380.687380] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Copying Virtual Disk [datastore2] vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/9be1babb-8d29-48e8-83e6-3b0b18d925d5/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1380.688048] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ce61b34-5a45-484a-8f45-52db5612514d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.695633] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Waiting for the task: (returnval){ [ 1380.695633] env[62346]: value = "task-4891721" [ 1380.695633] env[62346]: _type = "Task" [ 1380.695633] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.704686] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Task: {'id': task-4891721, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.206483] env[62346]: DEBUG oslo_vmware.exceptions [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1381.206803] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.207376] env[62346]: ERROR nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.207376] env[62346]: Faults: ['InvalidArgument'] [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Traceback (most recent call last): [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] yield resources [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self.driver.spawn(context, instance, image_meta, [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self._fetch_image_if_missing(context, vi) [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] image_cache(vi, tmp_image_ds_loc) [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] vm_util.copy_virtual_disk( [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] session._wait_for_task(vmdk_copy_task) [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] return self.wait_for_task(task_ref) [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] return evt.wait() [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] result = hub.switch() [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] return self.greenlet.switch() [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self.f(*self.args, **self.kw) [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] raise exceptions.translate_fault(task_info.error) [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Faults: ['InvalidArgument'] [ 1381.207376] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] [ 1381.208442] env[62346]: INFO nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Terminating instance [ 1381.209283] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.209503] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.209746] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-18a2f9fc-c5e5-4fbb-9bc9-b01408d99d40 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.213058] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1381.213058] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1381.213269] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d40dac-9490-4072-8f18-33aedc77adc4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.220780] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1381.221043] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f50d592b-ffbb-4452-a82d-28fcc0f56a54 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.223557] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.223739] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1381.224739] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-003f16ba-489c-42c0-98bb-978ded300012 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.230372] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 1381.230372] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f17b50-c3cf-91df-e9ec-54469fa3ab70" [ 1381.230372] env[62346]: _type = "Task" [ 1381.230372] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.239319] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f17b50-c3cf-91df-e9ec-54469fa3ab70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.300339] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1381.300573] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1381.300896] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Deleting the datastore file [datastore2] 6155a6c2-3d55-4fe6-bade-a97db98796a0 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1381.301204] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66cee2b5-aadc-43a0-b522-58787f8631ca {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.309154] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Waiting for the task: (returnval){ [ 1381.309154] env[62346]: value = "task-4891723" [ 1381.309154] env[62346]: _type = "Task" [ 1381.309154] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.319398] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Task: {'id': task-4891723, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.742629] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1381.742919] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating directory with path [datastore2] vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.743251] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3148d74c-85a2-4340-9df2-69c0277142e9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.756853] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Created directory with path [datastore2] vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.756997] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Fetch image to [datastore2] vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1381.757185] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1381.758019] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe15a66-1a97-46a7-bc08-85daaf40ec8d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.765827] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a649b324-802a-4c68-9f5a-dbe0c377abba {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.778210] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8572b1f-fc73-4c09-b82d-2145581e89eb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.808737] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-db2f7df8-6310-4692-a674-7c5974095401 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.820706] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b0ebb883-0b84-480a-800d-2f11a8eefbc5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.822547] env[62346]: DEBUG oslo_vmware.api [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Task: {'id': task-4891723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077586} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.822791] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1381.822969] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1381.823155] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1381.823329] env[62346]: INFO nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Took 0.61 seconds to destroy the instance on the hypervisor. 
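The delete sequence ending above follows the task-polling pattern that recurs throughout this log: a vCenter task is submitted (here DeleteDatastoreFile_Task, task-4891721/task-4891723), then polled until it reports success or a fault, which is why "progress is 0%" is logged before "completed successfully ... duration_secs". A minimal sketch of such a loop follows; get_task_info and TaskFailed are hypothetical stand-ins rather than the oslo.vmware API, and only the control flow mirrors what the log records.

# Minimal sketch of the poll-until-done loop visible in the log above.
# "get_task_info" is a hypothetical stand-in for the real vSphere call;
# only the control flow (poll, report progress, raise on fault) is real.
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an 'error' state."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll task_id until it succeeds, fails, or times out.

    get_task_info(task_id) is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info  # caller can read e.g. duration_secs from here
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown fault'))
        # Corresponds to the "progress is 0%" DEBUG lines in the log.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not complete in {timeout}s")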
[ 1381.825838] env[62346]: DEBUG nova.compute.claims [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1381.826017] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.826235] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.850421] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1381.905989] env[62346]: DEBUG oslo_vmware.rw_handles [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1381.970082] env[62346]: DEBUG oslo_vmware.rw_handles [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1381.970082] env[62346]: DEBUG oslo_vmware.rw_handles [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1382.199019] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc459dbc-7d15-458b-b827-5db912e6e5cf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.207735] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a51c537-b77e-4495-af5b-30ec0f5b5f63 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.239274] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f4c52c-5dd4-4d3d-a070-b27af8627967 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.247666] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05e70b4-c58f-44ce-98e1-5bc81f0f6d6c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.261852] env[62346]: DEBUG nova.compute.provider_tree [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.272636] env[62346]: DEBUG nova.scheduler.client.report [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.288429] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.462s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.289038] env[62346]: ERROR nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1382.289038] env[62346]: Faults: ['InvalidArgument'] [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Traceback (most recent call last): [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1382.289038] env[62346]: 
ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self.driver.spawn(context, instance, image_meta, [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self._fetch_image_if_missing(context, vi) [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] image_cache(vi, tmp_image_ds_loc) [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] vm_util.copy_virtual_disk( [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] session._wait_for_task(vmdk_copy_task) [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] return self.wait_for_task(task_ref) [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] return evt.wait() [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] result = hub.switch() [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] return self.greenlet.switch() [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] self.f(*self.args, **self.kw) [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] raise exceptions.translate_fault(task_info.error) [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Faults: ['InvalidArgument'] [ 1382.289038] env[62346]: ERROR nova.compute.manager [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] [ 1382.289915] env[62346]: DEBUG nova.compute.utils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1382.292859] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Build of instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 was re-scheduled: A specified parameter was not correct: fileType [ 1382.292859] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1382.293291] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1382.293466] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1382.293643] env[62346]: DEBUG nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1382.293805] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1382.671303] env[62346]: DEBUG nova.network.neutron [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.684109] env[62346]: INFO nova.compute.manager [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Took 0.39 seconds to deallocate network for instance. [ 1382.823677] env[62346]: INFO nova.scheduler.client.report [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Deleted allocations for instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 [ 1382.844715] env[62346]: DEBUG oslo_concurrency.lockutils [None req-74dcce07-9826-4604-93b1-a9fb3afcf065 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.997s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.846107] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 390.008s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.846347] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Acquiring lock "6155a6c2-3d55-4fe6-bade-a97db98796a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.846694] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.846911] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.849773] env[62346]: INFO nova.compute.manager [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Terminating instance [ 1382.851847] env[62346]: DEBUG nova.compute.manager [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1382.852111] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1382.852722] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-470fa377-585d-40ea-bcb6-9921e5e021a5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.863740] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601e7922-2605-4131-b05a-d32c86e84075 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.876283] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1382.902984] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6155a6c2-3d55-4fe6-bade-a97db98796a0 could not be found. [ 1382.903226] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1382.903437] env[62346]: INFO nova.compute.manager [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Took 0.05 seconds to destroy the instance on the hypervisor. 
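The lock bookkeeping above ("acquired ... waited 390.008s", "released ... held 0.194s") is oslo.concurrency's lockutils reporting how long a caller waited for a named lock and how long it then held it; the per-instance lock name serializes terminate_instance against the power-state sync. A minimal sketch of that instrumentation, assuming plain threading primitives rather than the real lockutils internals:

# Minimal sketch of the "waited X / held Y" lock reporting seen in the
# log; plain threading primitives, not the real oslo.concurrency code.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name: str):
    """Acquire the named lock, reporting wait and hold durations."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" released :: held {held:.3f}s')


# Usage mirroring the log: serialize work on a per-instance UUID lock.
with timed_lock("6155a6c2-3d55-4fe6-bade-a97db98796a0"):
    pass  # terminate_instance / sync_power_state work would run here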
[ 1382.903699] env[62346]: DEBUG oslo.service.loopingcall [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.903950] env[62346]: DEBUG nova.compute.manager [-] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1382.904059] env[62346]: DEBUG nova.network.neutron [-] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1382.932304] env[62346]: DEBUG nova.network.neutron [-] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.940896] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.941166] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.943079] env[62346]: INFO nova.compute.claims [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1382.946214] env[62346]: INFO nova.compute.manager [-] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] Took 0.04 seconds to deallocate network for instance. 
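The "Claim successful" line above passes because the requested resources fit the inventory this log keeps reporting (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 200) against the usage in the final resource view (10 vCPUs, 1792 MB RAM, 10 GB disk). A minimal sketch of that admission check, assuming the usual placement-style formula capacity = (total - reserved) * allocation_ratio:

# Minimal sketch of the capacity check behind "Claim successful".
# The formula capacity = (total - reserved) * allocation_ratio is an
# assumption about the check, not code from the resource tracker.

INVENTORY = {  # values copied from the inventory data logged above
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
}

USED = {"VCPU": 10, "MEMORY_MB": 1792, "DISK_GB": 10}  # final resource view


def claim_fits(request: dict[str, int]) -> bool:
    """Return True when every requested resource fits remaining capacity."""
    for rc, amount in request.items():
        inv = INVENTORY[rc]
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        if USED.get(rc, 0) + amount > capacity:
            return False
    return True


# The m1.nano claim from this log: 1 VCPU, 128 MB RAM, 1 GB disk.
print(claim_fits({"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}))  # True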
[ 1383.040607] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2dc45630-3bb8-4885-8aec-4f95a93b6468 tempest-ImagesNegativeTestJSON-1325206643 tempest-ImagesNegativeTestJSON-1325206643-project-member] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.194s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.042267] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 251.363s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.042267] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 6155a6c2-3d55-4fe6-bade-a97db98796a0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1383.042267] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "6155a6c2-3d55-4fe6-bade-a97db98796a0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.242836] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f498893-0edb-4853-b084-618be10a7adf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.251419] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c704be-4710-4219-af02-1b3a9e265134 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.281409] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83024e7b-9142-492f-8739-f87511544d43 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.291047] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97b1c73-ed35-49ab-876e-956a8ed5ef4c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.305737] env[62346]: DEBUG nova.compute.provider_tree [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.317709] env[62346]: DEBUG nova.scheduler.client.report [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1383.354593] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.413s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.355196] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1383.390468] env[62346]: DEBUG nova.compute.utils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1383.392152] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1383.392340] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1383.406371] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1383.463183] env[62346]: DEBUG nova.policy [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d43e25d1edd4053a15a8027cbd8529f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8317b2c0c98049fe8044a0edb4bca89c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1383.473762] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
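
The inventory dict reported at report.py:954 above is what placement uses to bound new allocations. A quick self-contained check of the usual capacity rule, (total - reserved) * allocation_ratio, using the same numbers; the helper name is illustrative, not nova's:

    # Fields copied from the inventory data logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Placement admits an allocation while used + requested <= capacity.
        return {rc: int((f['total'] - f['reserved']) * f['allocation_ratio'])
                for rc, f in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 200}
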
[ 1383.505662] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=<?>,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-01T11:25:33Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1383.505949] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1383.506127] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.506314] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1383.506462] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.506605] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1383.506844] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1383.507070] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1383.507266] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1383.507474] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1383.507658] env[62346]: DEBUG nova.virt.hardware [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1383.509250] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45762731-8418-4774-a2d0-ee86ec8f5574 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.518890] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f6b6e5-8fcf-464a-9ca6-64915220d570 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.888852] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Successfully created port: efdd5222-0dd1-49ef-a818-6e7ddc4c39a4 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1384.619417] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Successfully updated port: efdd5222-0dd1-49ef-a818-6e7ddc4c39a4 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1384.637395] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "refresh_cache-c6d55895-0a7a-4088-a065-3337c6045878" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.637395] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired lock "refresh_cache-c6d55895-0a7a-4088-a065-3337c6045878" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.637566] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
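
The nova.virt.hardware records above (hardware.py:563-577) walk a one-vCPU flavor through topology selection: with no flavor or image constraints, every (sockets, cores, threads) factorization of the vCPU count within the 65536 limits is considered, which for one vCPU leaves exactly VirtCPUTopology(cores=1,sockets=1,threads=1). A rough sketch of that enumeration idea only, not nova's implementation:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield (sockets, cores, threads) triples whose product is vcpus.
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log
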
[ 1384.696508] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1384.746234] env[62346]: DEBUG nova.compute.manager [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Received event network-vif-plugged-efdd5222-0dd1-49ef-a818-6e7ddc4c39a4 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1384.746571] env[62346]: DEBUG oslo_concurrency.lockutils [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] Acquiring lock "c6d55895-0a7a-4088-a065-3337c6045878-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.746679] env[62346]: DEBUG oslo_concurrency.lockutils [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] Lock "c6d55895-0a7a-4088-a065-3337c6045878-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.746844] env[62346]: DEBUG oslo_concurrency.lockutils [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] Lock "c6d55895-0a7a-4088-a065-3337c6045878-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.747012] env[62346]: DEBUG nova.compute.manager [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] No waiting events found dispatching network-vif-plugged-efdd5222-0dd1-49ef-a818-6e7ddc4c39a4 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1384.747183] env[62346]: WARNING nova.compute.manager [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Received unexpected event network-vif-plugged-efdd5222-0dd1-49ef-a818-6e7ddc4c39a4 for instance with vm_state building and task_state spawning. [ 1384.747342] env[62346]: DEBUG nova.compute.manager [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Received event network-changed-efdd5222-0dd1-49ef-a818-6e7ddc4c39a4 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1384.747494] env[62346]: DEBUG nova.compute.manager [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Refreshing instance network info cache due to event network-changed-efdd5222-0dd1-49ef-a818-6e7ddc4c39a4.
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1384.747657] env[62346]: DEBUG oslo_concurrency.lockutils [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] Acquiring lock "refresh_cache-c6d55895-0a7a-4088-a065-3337c6045878" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.972647] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Updating instance_info_cache with network_info: [{"id": "efdd5222-0dd1-49ef-a818-6e7ddc4c39a4", "address": "fa:16:3e:2d:a4:fc", "network": {"id": "2779d93f-257d-463e-b8de-ec5bc3180dba", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-521829042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8317b2c0c98049fe8044a0edb4bca89c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefdd5222-0d", "ovs_interfaceid": "efdd5222-0dd1-49ef-a818-6e7ddc4c39a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.985048] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Releasing lock "refresh_cache-c6d55895-0a7a-4088-a065-3337c6045878" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.985375] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Instance network_info: |[{"id": "efdd5222-0dd1-49ef-a818-6e7ddc4c39a4", "address": "fa:16:3e:2d:a4:fc", "network": {"id": "2779d93f-257d-463e-b8de-ec5bc3180dba", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-521829042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8317b2c0c98049fe8044a0edb4bca89c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", 
"segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefdd5222-0d", "ovs_interfaceid": "efdd5222-0dd1-49ef-a818-6e7ddc4c39a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1384.985722] env[62346]: DEBUG oslo_concurrency.lockutils [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] Acquired lock "refresh_cache-c6d55895-0a7a-4088-a065-3337c6045878" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.985904] env[62346]: DEBUG nova.network.neutron [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Refreshing network info cache for port efdd5222-0dd1-49ef-a818-6e7ddc4c39a4 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1384.987762] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:a4:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efdd5222-0dd1-49ef-a818-6e7ddc4c39a4', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1384.996537] env[62346]: DEBUG oslo.service.loopingcall [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1384.997598] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1384.999952] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a1c28eb-f0e7-4481-ae11-a51318af9178 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.023888] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1385.023888] env[62346]: value = "task-4891724" [ 1385.023888] env[62346]: _type = "Task" [ 1385.023888] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.033069] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891724, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.373083] env[62346]: DEBUG nova.network.neutron [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Updated VIF entry in instance network info cache for port efdd5222-0dd1-49ef-a818-6e7ddc4c39a4. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1385.373445] env[62346]: DEBUG nova.network.neutron [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Updating instance_info_cache with network_info: [{"id": "efdd5222-0dd1-49ef-a818-6e7ddc4c39a4", "address": "fa:16:3e:2d:a4:fc", "network": {"id": "2779d93f-257d-463e-b8de-ec5bc3180dba", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-521829042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8317b2c0c98049fe8044a0edb4bca89c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefdd5222-0d", "ovs_interfaceid": "efdd5222-0dd1-49ef-a818-6e7ddc4c39a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.383106] env[62346]: DEBUG oslo_concurrency.lockutils [req-917fcb32-1dd4-42e3-b26f-f6d38fb0e859 req-41729972-444a-40dd-a5f9-0264f22ba0b5 service nova] Releasing lock "refresh_cache-c6d55895-0a7a-4088-a065-3337c6045878" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.535557] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891724, 'name': CreateVM_Task, 'duration_secs': 0.317061} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.535756] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1385.536421] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.536589] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.536914] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1385.537197] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42a5628b-ed25-4c55-a687-dc005de2f7a0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.541729] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 1385.541729] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52aa0d8d-d5a1-ae30-a62c-18e6229bf20b" [ 1385.541729] env[62346]: _type = "Task" [ 1385.541729] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.555944] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52aa0d8d-d5a1-ae30-a62c-18e6229bf20b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
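
The CreateVM_Task and SearchDatastore_Task records above show oslo.vmware's blocking task pattern: invoke the vCenter method, get a task moref back, then poll it ("progress is N%") until it completes or raises a fault. A minimal sketch assuming an existing VMwareAPISession; the folder, config and pool arguments are placeholders, not values from this log:

    from oslo_vmware import api  # provides VMwareAPISession

    def create_vm(session, folder_ref, config_spec, pool_ref):
        # invoke_api() issues the SOAP call; wait_for_task() produces the
        # "Waiting for the task ..." and "progress is N%" DEBUG lines above.
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=pool_ref)
        return session.wait_for_task(task_ref)
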
[ 1386.053790] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.054131] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1386.054276] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.396555] env[62346]: DEBUG oslo_concurrency.lockutils [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "c6d55895-0a7a-4088-a065-3337c6045878" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.713716] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "daef9d9c-03a6-4ee8-9806-9d895f802776" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.714094] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.020344] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "5af6a907-80d7-4630-aa01-c600e4908d32" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.020344] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "5af6a907-80d7-4630-aa01-c600e4908d32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.220365] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.233180] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.233441] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.233683] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.233837] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1407.235373] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea99922d-fbbf-4cb3-9180-5d5ee3ec34ba {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.245353] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e5b7ee-bbd5-4e85-b316-dd40990a9f63 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.259755] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1caf7d-6a08-40e2-8d5a-470d253ebc31 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.266838] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76fe23d-cad7-40fa-aaaa-5bc56ff22001 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.298653] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180577MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1407.298917] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.299009] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.376820] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.376990] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377130] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377252] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377368] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377482] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377603] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377717] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377830] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.377940] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1407.389928] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 00208615-17d0-4e20-b1e9-80819181109e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.402798] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8f0203a5-3dc5-4a2d-9a96-07bed465d1eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.412930] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.424538] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 9423ec7b-edb3-4cce-9aae-4c8076011284 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.434505] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21c12062-6eb2-4e25-b780-a3678b18d278 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.446145] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.457065] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.468527] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.479181] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1407.479181] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1407.479181] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '62', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '2', 'io_workload': '10', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_4bf15abf99ca484a9e823e9cf6595cd3': '1', 'num_proj_ed41fdcbed524645bc79ee368edf832b': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_proj_f059d4d596ee4d2abf7190a5806dd848': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1407.734452] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6984029-9fa8-465a-92f8-9fea8b5220d3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.743638] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfffda3-aaa2-4a09-82d9-a72c2597c7f3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.774543] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2717ac5-b467-4482-8a49-71d18f7e6ad8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.782787] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64f1052-a7c6-4f26-a20a-fc371f40bccf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.797753] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1407.809340] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1407.825908] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
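
The final resource view above is easy to cross-check: ten building instances, each on the 128 MB / 1 GB root / 1 vCPU m1.nano flavor logged earlier, plus the 512 MB host memory reservation, account exactly for the used_ram, used_disk and used_vcpus figures:

    # Figures taken from the "Final resource view" record above.
    reserved_mb, instances, flavor_mb = 512, 10, 128
    assert reserved_mb + instances * flavor_mb == 1792  # used_ram=1792MB
    assert instances * 1 == 10  # used_disk=10GB (1 GB roots) and used_vcpus=10
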
[ 1407.826119] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.527s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.826345] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.826488] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1407.843625] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] There are 0 instances to clean {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1408.838615] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.838923] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.220414] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.220612] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1409.220737] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1409.245145] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.245145] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.245433] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Skipping network cache update for instance because it is Building.
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.245578] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.245732] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.245887] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.246031] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.246157] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.246272] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.246386] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1409.246502] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1410.219795] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.220198] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1411.220859] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.220859] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.220023] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.221758] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.222085] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.228505] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.228505] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances with incomplete migration {{(pid=62346) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1431.075105] env[62346]: WARNING oslo_vmware.rw_handles [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1431.075105] env[62346]: ERROR oslo_vmware.rw_handles [ 1431.075803] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1431.078283] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1431.078541] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Copying Virtual Disk [datastore2] vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/8ee3d9e2-ef6a-440a-be82-03b5e2371554/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1431.078823] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6832a78-db90-47ff-9328-f0b1831956b5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.088625] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 1431.088625] env[62346]: value = "task-4891725" [ 1431.088625] env[62346]: _type = "Task" [ 1431.088625] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.097870] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': task-4891725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.599523] env[62346]: DEBUG oslo_vmware.exceptions [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1431.599825] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.600407] env[62346]: ERROR nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1431.600407] env[62346]: Faults: ['InvalidArgument'] [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Traceback (most recent call last): [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] yield resources [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self.driver.spawn(context, instance, image_meta, [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self._fetch_image_if_missing(context, vi) [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] image_cache(vi, tmp_image_ds_loc) [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] vm_util.copy_virtual_disk( [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] session._wait_for_task(vmdk_copy_task) [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] return self.wait_for_task(task_ref) [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] return evt.wait() [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] result = hub.switch() [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] return self.greenlet.switch() [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self.f(*self.args, **self.kw) [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] raise exceptions.translate_fault(task_info.error) [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Faults: ['InvalidArgument'] [ 1431.600407] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] [ 1431.601737] env[62346]: INFO nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Terminating instance [ 1431.602504] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.602754] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.603458] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 
tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1431.603706] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1431.603977] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-551e1313-d633-4d65-ae22-b63539400c55 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.606434] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c01c7ba-1e10-4a57-a027-5b9d700bf10f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.615806] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1431.616085] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8368ec03-ec7b-4fdc-b6be-37c24d9822d7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.619434] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.619434] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1431.620075] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c38c468d-aa26-43f8-b3e6-411b9502e198 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.625941] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1431.625941] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52e49fd6-9dde-a082-267f-80e569e57303" [ 1431.625941] env[62346]: _type = "Task" [ 1431.625941] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.634313] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52e49fd6-9dde-a082-267f-80e569e57303, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.691037] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1431.691921] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1431.691921] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Deleting the datastore file [datastore2] 4d8eeb53-06e4-423f-8719-10f5283175b4 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.691921] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-164dfcf1-0a4b-4314-9ca1-655dce6e6d2c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.700494] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 1431.700494] env[62346]: value = "task-4891727" [ 1431.700494] env[62346]: _type = "Task" [ 1431.700494] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.713586] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': task-4891727, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.137262] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1432.137548] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1432.140262] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5c99f52-ef17-4d9f-8607-8951671715c8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.151178] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1432.151394] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Fetch image to [datastore2] vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1432.151903] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1432.152389] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13ee801-6502-471d-9820-1ef21402a5ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.163670] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95e3465-fcb5-4b61-b794-f6e64a96f6d3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.175352] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f193b0d9-ec68-4b48-97e8-428c76b0b015 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.215019] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4de7356-85c4-419b-9ec0-ee768184a0b9 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.226160] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae3017e1-d64f-4dcb-bc64-1a2b3c458811 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.228379] env[62346]: DEBUG oslo_vmware.api [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': task-4891727, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078114} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.228633] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.228809] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1432.229071] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1432.229167] env[62346]: INFO nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 1432.231609] env[62346]: DEBUG nova.compute.claims [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1432.231854] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.231988] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.252787] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1432.349151] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1432.424484] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1432.424484] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1432.635964] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306451aa-bff1-48e3-88e2-19ea2925cff4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.645721] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e6e767-1ff7-42c5-a6ad-7db7b7092d7f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.677363] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c8569b-1bd9-476b-88de-42eb57275100 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.685437] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e47ef2-9652-4f8e-b68f-30d653520b08 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.699481] env[62346]: DEBUG nova.compute.provider_tree [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.709687] env[62346]: DEBUG nova.scheduler.client.report [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1432.725723] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.494s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.726292] env[62346]: ERROR nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1432.726292] env[62346]: Faults: ['InvalidArgument'] [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Traceback (most recent call last): [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance 
[ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self.driver.spawn(context, instance, image_meta, [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self._fetch_image_if_missing(context, vi) [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] image_cache(vi, tmp_image_ds_loc) [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] vm_util.copy_virtual_disk( [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] session._wait_for_task(vmdk_copy_task) [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] return self.wait_for_task(task_ref) [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] return evt.wait() [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] result = hub.switch() [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] return self.greenlet.switch() [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] self.f(*self.args, **self.kw) [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 
4d8eeb53-06e4-423f-8719-10f5283175b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] raise exceptions.translate_fault(task_info.error) [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Faults: ['InvalidArgument'] [ 1432.726292] env[62346]: ERROR nova.compute.manager [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] [ 1432.727109] env[62346]: DEBUG nova.compute.utils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1432.728579] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Build of instance 4d8eeb53-06e4-423f-8719-10f5283175b4 was re-scheduled: A specified parameter was not correct: fileType [ 1432.728579] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1432.728956] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1432.729148] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1432.729321] env[62346]: DEBUG nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1432.729484] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1433.276836] env[62346]: DEBUG nova.network.neutron [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.296444] env[62346]: INFO nova.compute.manager [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Took 0.57 seconds to deallocate network for instance. [ 1433.441480] env[62346]: INFO nova.scheduler.client.report [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Deleted allocations for instance 4d8eeb53-06e4-423f-8719-10f5283175b4 [ 1433.462291] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f38166cb-90de-43ea-b46d-81ec0187e9fb tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.349s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.463535] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.399s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.463728] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "4d8eeb53-06e4-423f-8719-10f5283175b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.463930] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.464111] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.468777] env[62346]: INFO nova.compute.manager [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Terminating instance [ 1433.470399] env[62346]: DEBUG nova.compute.manager [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1433.470399] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1433.470864] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ff1ce14-4584-42b0-b99e-237632b29c6c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.481750] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e666864-e535-43ef-b673-71b51557aebb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.497019] env[62346]: DEBUG nova.compute.manager [None req-1989792c-b527-472e-8e74-48b58df653e5 tempest-ServerGroupTestJSON-1718991290 tempest-ServerGroupTestJSON-1718991290-project-member] [instance: 3027e833-8cb3-4ace-bc05-f8370630e1ed] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1433.521682] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4d8eeb53-06e4-423f-8719-10f5283175b4 could not be found. 
[ 1433.521960] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1433.522696] env[62346]: INFO nova.compute.manager [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1433.522940] env[62346]: DEBUG oslo.service.loopingcall [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1433.523884] env[62346]: DEBUG nova.compute.manager [-] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1433.523995] env[62346]: DEBUG nova.network.neutron [-] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1433.526013] env[62346]: DEBUG nova.compute.manager [None req-1989792c-b527-472e-8e74-48b58df653e5 tempest-ServerGroupTestJSON-1718991290 tempest-ServerGroupTestJSON-1718991290-project-member] [instance: 3027e833-8cb3-4ace-bc05-f8370630e1ed] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1433.557779] env[62346]: DEBUG nova.network.neutron [-] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.560231] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1989792c-b527-472e-8e74-48b58df653e5 tempest-ServerGroupTestJSON-1718991290 tempest-ServerGroupTestJSON-1718991290-project-member] Lock "3027e833-8cb3-4ace-bc05-f8370630e1ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.382s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.568676] env[62346]: INFO nova.compute.manager [-] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] Took 0.04 seconds to deallocate network for instance. [ 1433.574912] env[62346]: DEBUG nova.compute.manager [None req-f903f639-22b7-4861-990d-2bb05344f63f tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: c64fd0a9-d455-448f-bb53-82999adccf14] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1433.598826] env[62346]: DEBUG nova.compute.manager [None req-f903f639-22b7-4861-990d-2bb05344f63f tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: c64fd0a9-d455-448f-bb53-82999adccf14] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1433.629245] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f903f639-22b7-4861-990d-2bb05344f63f tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "c64fd0a9-d455-448f-bb53-82999adccf14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.360s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.639108] env[62346]: DEBUG nova.compute.manager [None req-f8ae78c6-5747-4722-b466-e7d3100e1dbe tempest-ServerActionsV293TestJSON-384051578 tempest-ServerActionsV293TestJSON-384051578-project-member] [instance: 3c523404-52dd-4248-bc92-a2d67b03009e] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1433.672182] env[62346]: DEBUG nova.compute.manager [None req-f8ae78c6-5747-4722-b466-e7d3100e1dbe tempest-ServerActionsV293TestJSON-384051578 tempest-ServerActionsV293TestJSON-384051578-project-member] [instance: 3c523404-52dd-4248-bc92-a2d67b03009e] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1433.694419] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4bcab52-fb5e-4ac7-be9a-6ab0d4442b79 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.231s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.695645] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 302.017s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.695947] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 4d8eeb53-06e4-423f-8719-10f5283175b4] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1433.696048] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "4d8eeb53-06e4-423f-8719-10f5283175b4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.701409] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f8ae78c6-5747-4722-b466-e7d3100e1dbe tempest-ServerActionsV293TestJSON-384051578 tempest-ServerActionsV293TestJSON-384051578-project-member] Lock "3c523404-52dd-4248-bc92-a2d67b03009e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.736s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.710567] env[62346]: DEBUG nova.compute.manager [None req-019e3cef-920e-4a24-aba8-da18deeeefa7 tempest-ServerRescueTestJSONUnderV235-462403715 tempest-ServerRescueTestJSONUnderV235-462403715-project-member] [instance: 00208615-17d0-4e20-b1e9-80819181109e] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1433.735077] env[62346]: DEBUG nova.compute.manager [None req-019e3cef-920e-4a24-aba8-da18deeeefa7 tempest-ServerRescueTestJSONUnderV235-462403715 tempest-ServerRescueTestJSONUnderV235-462403715-project-member] [instance: 00208615-17d0-4e20-b1e9-80819181109e] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1433.755982] env[62346]: DEBUG oslo_concurrency.lockutils [None req-019e3cef-920e-4a24-aba8-da18deeeefa7 tempest-ServerRescueTestJSONUnderV235-462403715 tempest-ServerRescueTestJSONUnderV235-462403715-project-member] Lock "00208615-17d0-4e20-b1e9-80819181109e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.747s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.765238] env[62346]: DEBUG nova.compute.manager [None req-2fdf0690-dbd7-4f5f-b2be-8889bfba7c88 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] [instance: 8f0203a5-3dc5-4a2d-9a96-07bed465d1eb] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1433.799645] env[62346]: DEBUG nova.compute.manager [None req-2fdf0690-dbd7-4f5f-b2be-8889bfba7c88 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] [instance: 8f0203a5-3dc5-4a2d-9a96-07bed465d1eb] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1433.879915] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2fdf0690-dbd7-4f5f-b2be-8889bfba7c88 tempest-AttachInterfacesTestJSON-955272362 tempest-AttachInterfacesTestJSON-955272362-project-member] Lock "8f0203a5-3dc5-4a2d-9a96-07bed465d1eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.777s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.894416] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1433.955474] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.955603] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.957203] env[62346]: INFO nova.compute.claims [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1434.191455] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f745da82-e59b-41e3-8c63-a378dcb6ecc7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.199119] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b63a77-bca1-41e8-9821-db9770536b20 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.228998] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81debc2f-fcd6-4e07-98be-02bbd577859d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.236952] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9b15c4-82b8-4767-b57b-4d99791d61a4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.250987] env[62346]: DEBUG nova.compute.provider_tree [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1434.263514] env[62346]: DEBUG nova.scheduler.client.report [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1434.278021] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.322s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.278722] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1434.314381] env[62346]: DEBUG nova.compute.utils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1434.316086] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1434.317114] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1434.329281] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Start building block device mappings for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1434.400703] env[62346]: DEBUG nova.policy [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '192a01791cf84a66af64501383745d08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '718112c25d784fc1aa3f11916d691658', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1434.411711] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1434.441931] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1434.442272] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1434.442440] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1434.442633] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1434.442916] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1434.443042] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1434.443262] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1434.443420] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1434.443583] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1434.443741] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1434.443916] env[62346]: DEBUG nova.virt.hardware [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1434.444855] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a610db3f-2764-4aab-81ef-f93437f7faf7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.453655] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2c122d-5445-4121-ab2e-ad40469156a5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.979850] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Successfully created port: 201cb7f2-e9c5-4bf3-978c-c84394ed1a7a {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1435.685689] env[62346]: DEBUG nova.compute.manager [req-826a78e8-7e42-44a0-93ad-5ec5a7649819 req-5ee0447b-d0e8-4b46-8237-e6c5c8e4b25d service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Received event network-vif-plugged-201cb7f2-e9c5-4bf3-978c-c84394ed1a7a {{(pid=62346) external_instance_event 
[ 1435.685689] env[62346]: DEBUG nova.compute.manager [req-826a78e8-7e42-44a0-93ad-5ec5a7649819 req-5ee0447b-d0e8-4b46-8237-e6c5c8e4b25d service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Received event network-vif-plugged-201cb7f2-e9c5-4bf3-978c-c84394ed1a7a {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1435.685971] env[62346]: DEBUG oslo_concurrency.lockutils [req-826a78e8-7e42-44a0-93ad-5ec5a7649819 req-5ee0447b-d0e8-4b46-8237-e6c5c8e4b25d service nova] Acquiring lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.686253] env[62346]: DEBUG oslo_concurrency.lockutils [req-826a78e8-7e42-44a0-93ad-5ec5a7649819 req-5ee0447b-d0e8-4b46-8237-e6c5c8e4b25d service nova] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.686465] env[62346]: DEBUG oslo_concurrency.lockutils [req-826a78e8-7e42-44a0-93ad-5ec5a7649819 req-5ee0447b-d0e8-4b46-8237-e6c5c8e4b25d service nova] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.686641] env[62346]: DEBUG nova.compute.manager [req-826a78e8-7e42-44a0-93ad-5ec5a7649819 req-5ee0447b-d0e8-4b46-8237-e6c5c8e4b25d service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] No waiting events found dispatching network-vif-plugged-201cb7f2-e9c5-4bf3-978c-c84394ed1a7a {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1435.686808] env[62346]: WARNING nova.compute.manager [req-826a78e8-7e42-44a0-93ad-5ec5a7649819 req-5ee0447b-d0e8-4b46-8237-e6c5c8e4b25d service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Received unexpected event network-vif-plugged-201cb7f2-e9c5-4bf3-978c-c84394ed1a7a for instance with vm_state building and task_state spawning.
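The Acquiring / acquired / "released" triple above, with its waited/held timings, is oslo.concurrency's standard logging for a decorated lock; the <locals> in the qualified name is simply the inner function that pop_instance_event defines and wraps. A minimal sketch of the pattern, assuming only the public lockutils API (the lock name is copied from the log purely for illustration):

    from oslo_concurrency import lockutils

    # Per-instance event lock, named "<instance-uuid>-events" as above.
    @lockutils.synchronized('e9f8e137-98d4-48ef-b642-8cd9aff72f87-events')
    def _pop_event():
        # lockutils logs 'Acquiring' / 'acquired ... waited Ns' before this
        # body runs and '"released" ... held Ns' once it returns.
        pass

    _pop_event()
    # The same effect is available as a context manager:
    #   with lockutils.lock('e9f8e137-...-events'): ...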
[ 1435.748628] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Successfully updated port: 201cb7f2-e9c5-4bf3-978c-c84394ed1a7a {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1435.764580] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "refresh_cache-e9f8e137-98d4-48ef-b642-8cd9aff72f87" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.764756] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquired lock "refresh_cache-e9f8e137-98d4-48ef-b642-8cd9aff72f87" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.764915] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1435.824426] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1436.038623] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Updating instance_info_cache with network_info: [{"id": "201cb7f2-e9c5-4bf3-978c-c84394ed1a7a", "address": "fa:16:3e:d3:88:f5", "network": {"id": "3d4963f1-47ae-4fc4-a27b-83e6ebc532ad", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-903477740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "718112c25d784fc1aa3f11916d691658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap201cb7f2-e9", "ovs_interfaceid": "201cb7f2-e9c5-4bf3-978c-c84394ed1a7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.053029] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Releasing lock "refresh_cache-e9f8e137-98d4-48ef-b642-8cd9aff72f87" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.053456] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Instance network_info: |[{"id": "201cb7f2-e9c5-4bf3-978c-c84394ed1a7a", "address": "fa:16:3e:d3:88:f5", "network": {"id": "3d4963f1-47ae-4fc4-a27b-83e6ebc532ad", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-903477740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "718112c25d784fc1aa3f11916d691658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap201cb7f2-e9", "ovs_interfaceid": "201cb7f2-e9c5-4bf3-978c-c84394ed1a7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1436.054205] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:88:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '201cb7f2-e9c5-4bf3-978c-c84394ed1a7a', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1436.061932] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Creating folder: Project (718112c25d784fc1aa3f11916d691658). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1436.062494] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26e17d1e-e9fc-4e40-b459-a0d2068ab933 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.073340] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Created folder: Project (718112c25d784fc1aa3f11916d691658) in parent group-v953204. [ 1436.073585] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Creating folder: Instances. Parent ref: group-v953293. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1436.073774] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b89422da-3e86-43d0-8823-610c890989e1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.083124] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Created folder: Instances in parent group-v953293. [ 1436.083374] env[62346]: DEBUG oslo.service.loopingcall [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1436.083573] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1436.083783] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47aa5225-eb5c-4646-a08d-8e8ae69de495 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.103719] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1436.103719] env[62346]: value = "task-4891730" [ 1436.103719] env[62346]: _type = "Task" [ 1436.103719] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.111510] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891730, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.614447] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891730, 'name': CreateVM_Task, 'duration_secs': 0.299231} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.614621] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1436.615420] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.615580] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.615912] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1436.616243] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d929ae98-58a1-40bb-8477-7dc03cde18a8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
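The CreateVM_Task sequence above is oslo.vmware's invoke-then-poll idiom: a *_Task SOAP method returns a task moref, and wait_for_task() polls it on a looping call (the "progress is 0%" / "completed successfully" lines). A minimal sketch of the calling pattern, assuming a VMwareAPISession like the one opened at the top of this log; the folder, config and pool arguments are placeholders the caller would supply:

    def create_vm(session, folder_ref, vm_config, respool_ref):
        # session is an oslo_vmware.api.VMwareAPISession; with invoke_api(),
        # the first positional argument after the method name is the managed
        # object the method is invoked on.
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=vm_config, pool=respool_ref)
        # wait_for_task() drives the "Waiting for the task ... progress is N%"
        # lines above and raises a translated exception if the task fails.
        return session.wait_for_task(task)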
[ 1436.621014] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Waiting for the task: (returnval){ [ 1436.621014] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]527b0662-ef53-75d7-d625-66997a67d178" [ 1436.621014] env[62346]: _type = "Task" [ 1436.621014] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.630226] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]527b0662-ef53-75d7-d625-66997a67d178, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.132059] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.132457] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1437.132457] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.709495] env[62346]: DEBUG nova.compute.manager [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Received event network-changed-201cb7f2-e9c5-4bf3-978c-c84394ed1a7a {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1437.709710] env[62346]: DEBUG nova.compute.manager [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Refreshing instance network info cache due to event network-changed-201cb7f2-e9c5-4bf3-978c-c84394ed1a7a.
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1437.709910] env[62346]: DEBUG oslo_concurrency.lockutils [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] Acquiring lock "refresh_cache-e9f8e137-98d4-48ef-b642-8cd9aff72f87" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.710069] env[62346]: DEBUG oslo_concurrency.lockutils [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] Acquired lock "refresh_cache-e9f8e137-98d4-48ef-b642-8cd9aff72f87" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.710224] env[62346]: DEBUG nova.network.neutron [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Refreshing network info cache for port 201cb7f2-e9c5-4bf3-978c-c84394ed1a7a {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1438.083491] env[62346]: DEBUG nova.network.neutron [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Updated VIF entry in instance network info cache for port 201cb7f2-e9c5-4bf3-978c-c84394ed1a7a. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1438.083837] env[62346]: DEBUG nova.network.neutron [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Updating instance_info_cache with network_info: [{"id": "201cb7f2-e9c5-4bf3-978c-c84394ed1a7a", "address": "fa:16:3e:d3:88:f5", "network": {"id": "3d4963f1-47ae-4fc4-a27b-83e6ebc532ad", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-903477740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "718112c25d784fc1aa3f11916d691658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap201cb7f2-e9", "ovs_interfaceid": "201cb7f2-e9c5-4bf3-978c-c84394ed1a7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.098898] env[62346]: DEBUG oslo_concurrency.lockutils [req-f6d306b3-ebe4-4929-b819-f60e18166825 req-08db78b0-d372-42c5-bc16-fb7417b66f9a service nova] Releasing lock "refresh_cache-e9f8e137-98d4-48ef-b642-8cd9aff72f87" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.708722] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 
tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.369687] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "87c6dc89-e89b-4c72-b29c-16751a749d29" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.370041] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.231961] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1467.244671] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.244898] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.245064] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.245226] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1467.246594] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3d0c04-801b-4e4e-8578-636751c00421 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.256277] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a017dd85-29d8-4878-b4db-8b8c4570a8d4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
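The "Running periodic task ComputeManager.update_available_resource" line above comes from oslo.service's periodic task runner, which collects decorated methods on a manager class and fires each on its spacing. A minimal sketch of how such a task is declared, assuming only the public oslo_service API; the class name and the 60-second spacing are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        # Methods tagged @periodic_task are collected automatically; the
        # runner logs "Running periodic task <Class>.<method>" on each cycle,
        # as seen above for update_available_resource.
        @periodic_task.periodic_task(spacing=60)
        def update_available_resource(self, context):
            pass

    mgr = Manager(cfg.CONF)
    mgr.run_periodic_tasks(context=None)  # in a service this is driven by a looping call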
[ 1467.271970] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a13bf3-d535-488e-9114-6d4311e9d0c2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.279133] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31444fa0-ac4b-4cdd-9fe4-fa2c39768136 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.310863] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180590MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1467.311146] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.311436] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.450380] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.450579] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.450715] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.450840] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.450961] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.451094] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.451215] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.451330] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.451448] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.451565] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1467.466531] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1467.477077] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1467.487035] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1467.497130] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1467.508144] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1467.508410] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1467.508633] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '68', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'io_workload': '10', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_4bf15abf99ca484a9e823e9cf6595cd3': '1', 'num_proj_ed41fdcbed524645bc79ee368edf832b': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_proj_f059d4d596ee4d2abf7190a5806dd848': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_718112c25d784fc1aa3f11916d691658': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1467.525258] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing inventories for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1467.541569] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating ProviderTree inventory for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1467.541836] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating inventory 
in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1467.554010] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing aggregate associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, aggregates: None {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1467.573018] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing trait associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1467.758517] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b58a55-5100-4b01-b3f4-131d8d6f7fa1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.766818] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920cd5aa-99c5-4123-bc0d-632e35d7fe18 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.797785] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edfafa6-4c3e-4429-a778-1c648550f142 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.805634] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5e9075-858a-4ac1-a264-6ef9ff1abf89 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.819303] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.828419] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
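These inventory dicts are what Placement does its capacity math against: the usable amount of each resource class is (total - reserved) * allocation_ratio. For this provider that gives 48 * 4.0 = 192 schedulable VCPUs, (196590 - 512) * 1.0 = 196078 MB of RAM, and 200 GB of disk, consistent with the "Total usable vcpus: 48, total allocated vcpus: 10" line earlier. A quick check of the arithmetic; the inventory values are copied from the log, the helper is illustrative:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement's effective capacity for one resource class.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # Prints: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0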
[ 1467.842541] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1467.842729] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.531s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.826648] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.826999] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.220110] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.220531] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1471.220531] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1471.250808] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251075] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251166] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251265] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251388] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Skipping network cache update for instance because it is Building.
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251508] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251628] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251746] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251862] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.251975] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1471.252109] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1471.252654] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.252837] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.252969] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1472.220488] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.220823] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.216051] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.220191] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1480.725864] env[62346]: WARNING oslo_vmware.rw_handles [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1480.725864] env[62346]: ERROR oslo_vmware.rw_handles [ 1480.726714] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1480.728843] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 
c8d8504b-c17f-4b1e-9769-843e49df0ea4] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1480.729117] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Copying Virtual Disk [datastore2] vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/3969bc2b-9c9d-4911-a877-ccb0b0a63bc5/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1480.729409] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20c1b2ba-154c-48cf-bc61-470617aca732 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.737704] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1480.737704] env[62346]: value = "task-4891731" [ 1480.737704] env[62346]: _type = "Task" [ 1480.737704] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.746764] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891731, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.248914] env[62346]: DEBUG oslo_vmware.exceptions [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1481.249231] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.249785] env[62346]: ERROR nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1481.249785] env[62346]: Faults: ['InvalidArgument'] [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Traceback (most recent call last): [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] yield resources [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self.driver.spawn(context, instance, image_meta, [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self._fetch_image_if_missing(context, vi) [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] image_cache(vi, tmp_image_ds_loc) [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] vm_util.copy_virtual_disk( [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] session._wait_for_task(vmdk_copy_task) [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] return self.wait_for_task(task_ref) [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] return evt.wait() [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] result = hub.switch() [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] return self.greenlet.switch() [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self.f(*self.args, **self.kw) [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] raise exceptions.translate_fault(task_info.error) [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Faults: ['InvalidArgument'] [ 1481.249785] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] [ 1481.250873] env[62346]: INFO nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Terminating instance [ 1481.251694] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.251913] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.252176] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
[ 1481.252176] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39e38755-cc9d-4bb5-89f4-ac4609e47bf7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.255056] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1481.255301] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1481.256116] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b531229-e283-46c3-9cb3-4a94b84076d9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.263506] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1481.263872] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e423358-c399-43d7-91e9-496313c4ad19 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.266195] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.266402] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1481.267456] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bb67481-08e8-43bf-b664-cb2548157e49 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.273173] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Waiting for the task: (returnval){ [ 1481.273173] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52be094c-e36d-e625-f269-5765a9723891" [ 1481.273173] env[62346]: _type = "Task" [ 1481.273173] env[62346]: } to complete.
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.281236] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52be094c-e36d-e625-f269-5765a9723891, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.332833] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1481.333055] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1481.333343] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleting the datastore file [datastore2] c8d8504b-c17f-4b1e-9769-843e49df0ea4 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1481.333730] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8466375f-aa14-44b1-a6a4-488ee90a7bec {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.340576] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1481.340576] env[62346]: value = "task-4891733" [ 1481.340576] env[62346]: _type = "Task" [ 1481.340576] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.349497] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.783914] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1481.784252] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Creating directory with path [datastore2] vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.784474] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e588d558-144a-4167-8c36-149234966bf7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.797868] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Created directory with path [datastore2] vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.798109] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Fetch image to [datastore2] vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1481.798286] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1481.799088] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a6424e-909f-4821-a274-d7740b7bdb6a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.806189] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c94bb3-0329-4358-9cd2-68fa03eae8b5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.815578] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0b0159-2be5-4436-a5f1-4a2c9de979ff {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1481.848719] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a566d3d1-7194-46be-976f-92aef76e112d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.857687] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-511276df-2db5-49a9-a1d0-43dd4afc345b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.859466] env[62346]: DEBUG oslo_vmware.api [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073313} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.859721] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1481.859908] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1481.860094] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1481.860467] env[62346]: INFO nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Took 0.61 seconds to destroy the instance on the hypervisor. 
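The "Waiting for the task: (returnval){...}" / "progress is 0%" / "completed successfully" records above all come from one pattern: a loop polling VMware task state, with error states raised as translated faults like the "A specified parameter was not correct: fileType" / InvalidArgument one in the traceback. A minimal sketch of that pattern follows; it is an illustration only, not oslo.vmware's real implementation, and every class and function name in it is invented for the example:

    import time
    from dataclasses import dataclass

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    @dataclass
    class TaskInfo:
        state: str        # 'running' | 'success' | 'error'
        progress: int = 0
        error: str = ''

    def wait_for_task(poll, interval=0.5):
        """Poll `poll()` (returns a TaskInfo) until the task finishes.

        Mirrors the log's pattern: repeated "progress is N%" checks,
        then either "completed successfully" or a raised fault.
        """
        while True:
            info = poll()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # The fault is translated before raising; the log shows
                # the translated result:
                #   "A specified parameter was not correct: fileType"
                #   Faults: ['InvalidArgument']
                raise VimFaultException(['InvalidArgument'], info.error)
            time.sleep(interval)  # the real loop yields to an eventlet
                                  # loopingcall here instead of sleeping

As the tracebacks above show, the production loop runs inside oslo_vmware's loopingcall on eventlet rather than a sleep loop, which is why the failure surfaces through hub.switch() and _poll_task frames.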
[ 1481.862730] env[62346]: DEBUG nova.compute.claims [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1481.862903] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.863131] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.884072] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1481.940529] env[62346]: DEBUG oslo_vmware.rw_handles [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1482.002275] env[62346]: DEBUG oslo_vmware.rw_handles [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1482.002456] env[62346]: DEBUG oslo_vmware.rw_handles [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1482.162778] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3126f0-4251-4f62-9bf1-662920741777 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.170938] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8cb5b5-b229-449a-8c53-931e0e07f5ca {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.201994] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a62ed0-134c-43a6-a9d0-4e77c9e284ce {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.210247] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfd0a8b-a22a-4323-a584-e6303c725de1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.223911] env[62346]: DEBUG nova.compute.provider_tree [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.235023] env[62346]: DEBUG nova.scheduler.client.report [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1482.247275] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.384s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.248027] env[62346]: ERROR nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1482.248027] env[62346]: Faults: ['InvalidArgument'] [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Traceback (most recent call last): [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1482.248027] env[62346]: ERROR 
nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self.driver.spawn(context, instance, image_meta, [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self._fetch_image_if_missing(context, vi) [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] image_cache(vi, tmp_image_ds_loc) [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] vm_util.copy_virtual_disk( [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] session._wait_for_task(vmdk_copy_task) [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] return self.wait_for_task(task_ref) [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] return evt.wait() [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] result = hub.switch() [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] return self.greenlet.switch() [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] self.f(*self.args, **self.kw) [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] raise exceptions.translate_fault(task_info.error) [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Faults: ['InvalidArgument'] [ 1482.248027] env[62346]: ERROR nova.compute.manager [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] [ 1482.249314] env[62346]: DEBUG nova.compute.utils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1482.250931] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Build of instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 was re-scheduled: A specified parameter was not correct: fileType [ 1482.250931] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1482.251376] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1482.251601] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1482.251863] env[62346]: DEBUG nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1482.252091] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1482.609331] env[62346]: DEBUG nova.network.neutron [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.621526] env[62346]: INFO nova.compute.manager [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Took 0.37 seconds to deallocate network for instance. [ 1482.719750] env[62346]: INFO nova.scheduler.client.report [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted allocations for instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 [ 1482.743148] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e021d62b-fa58-4b5f-82ba-d46d75e4532e tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 581.019s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.744190] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 384.832s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.744190] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.744190] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.744668] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.746504] env[62346]: INFO nova.compute.manager [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Terminating instance [ 1482.748251] env[62346]: DEBUG nova.compute.manager [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1482.748446] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1482.748946] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70907860-eab2-4624-ba58-6afc1a52c8b5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.759397] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63294af1-2198-43b0-bfcf-5f4ee9adbdef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.770596] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: 9423ec7b-edb3-4cce-9aae-4c8076011284] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1482.796700] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c8d8504b-c17f-4b1e-9769-843e49df0ea4 could not be found. [ 1482.796700] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1482.796700] env[62346]: INFO nova.compute.manager [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Took 0.05 seconds to destroy the instance on the hypervisor. 
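Most of the records immediately above are oslo.concurrency named-lock bookkeeping: each lock logs how long the caller waited to acquire it and how long it was held (e.g. the build lock held 581.019s, the terminate lock waited on for 384.832s). A rough plain-Python sketch of that pattern, with all names assumed for the example rather than taken from lockutils:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                        # name -> threading.Lock
    _registry_guard = threading.Lock()

    @contextmanager
    def named_lock(name, caller):
        # Look up (or create) the process-wide lock for this name.
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{caller}" :: '
              f'waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: '
                  f'held {time.monotonic() - acquired:.3f}s')

    # e.g. guarding a resource claim the way the tracker records above do:
    with named_lock('compute_resources', 'instance_claim'):
        pass  # claim / abort work would happen here

The long waited/held durations in the log are therefore measurements around the critical section, not timeouts: the terminate request queued behind the (failed, re-scheduled) build for the whole 384.832s.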
[ 1482.796700] env[62346]: DEBUG oslo.service.loopingcall [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1482.797104] env[62346]: DEBUG nova.compute.manager [-] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1482.797104] env[62346]: DEBUG nova.network.neutron [-] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1482.823793] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: 9423ec7b-edb3-4cce-9aae-4c8076011284] Instance disappeared before build. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1482.830362] env[62346]: DEBUG nova.network.neutron [-] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.839393] env[62346]: INFO nova.compute.manager [-] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] Took 0.04 seconds to deallocate network for instance. [ 1482.848102] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "9423ec7b-edb3-4cce-9aae-4c8076011284" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.496s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.863330] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: 21c12062-6eb2-4e25-b780-a3678b18d278] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1482.894035] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: 21c12062-6eb2-4e25-b780-a3678b18d278] Instance disappeared before build. 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1482.916140] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "21c12062-6eb2-4e25-b780-a3678b18d278" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.538s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.928858] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1482.972242] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8e371a8c-1b19-4823-ba32-a89054f1c11b tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.228s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.975103] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 351.296s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.975349] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c8d8504b-c17f-4b1e-9769-843e49df0ea4] During sync_power_state the instance has a pending task (deleting). Skip. 
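The "Inventory has not changed" report records in this log repeat the same inventory dict for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c. The schedulable capacity those numbers imply follows the usual (total - reserved) * allocation_ratio arithmetic; a small worked check, using values copied from the log records:

    # Values copied from the scheduler report records in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f'{rc}: schedulable capacity = {capacity:g}')

    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200

That is, the 4.0 vCPU overcommit turns 48 physical cores into 192 schedulable VCPU, while memory and disk are reported at (near) physical capacity — which is why the claim for the 1-vCPU/128MB m1.nano flavor below succeeds immediately.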
[ 1482.975540] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "c8d8504b-c17f-4b1e-9769-843e49df0ea4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.991979] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.992238] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.993786] env[62346]: INFO nova.compute.claims [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.220997] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8f2ae7-88d7-423d-b818-95db3fda17d0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.229077] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49760bfc-8d75-4b45-a54c-4f671f2e5140 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.259327] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8188f6d8-0967-417c-a57f-f646fc28902a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.267023] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70adc4fa-95e1-4b60-9232-4808611e364d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.281509] env[62346]: DEBUG nova.compute.provider_tree [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.290765] env[62346]: DEBUG nova.scheduler.client.report [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1483.308111] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.308648] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1483.343305] env[62346]: DEBUG nova.compute.utils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.345057] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1483.345057] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1483.355055] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1483.411295] env[62346]: DEBUG nova.policy [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c601083f0a44da850b33189c701bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abc1ead3f9a9442ca0b85f152f94fe6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1483.419201] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1483.445325] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1483.445633] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1483.445803] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.445992] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1483.446158] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.446401] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1483.446522] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1483.446740] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1483.446926] env[62346]: DEBUG nova.virt.hardware [None 
req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1483.447107] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1483.447285] env[62346]: DEBUG nova.virt.hardware [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.448155] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba93ba3f-ff47-4170-b624-1d796c5a0298 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.456849] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5bdb98-3702-46aa-8dee-906ae7a8949f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.821535] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Successfully created port: 673805d6-777f-43ed-ba10-6cf3962a0a45 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1484.647192] env[62346]: DEBUG nova.compute.manager [req-efaa4c5c-3725-403f-b784-193671541310 req-110561a6-8d81-4a73-a564-ae35425a7384 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Received event network-vif-plugged-673805d6-777f-43ed-ba10-6cf3962a0a45 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1484.647192] env[62346]: DEBUG oslo_concurrency.lockutils [req-efaa4c5c-3725-403f-b784-193671541310 req-110561a6-8d81-4a73-a564-ae35425a7384 service nova] Acquiring lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.647192] env[62346]: DEBUG oslo_concurrency.lockutils [req-efaa4c5c-3725-403f-b784-193671541310 req-110561a6-8d81-4a73-a564-ae35425a7384 service nova] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.647192] env[62346]: DEBUG oslo_concurrency.lockutils [req-efaa4c5c-3725-403f-b784-193671541310 req-110561a6-8d81-4a73-a564-ae35425a7384 service nova] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.647192] env[62346]: DEBUG nova.compute.manager 
[req-efaa4c5c-3725-403f-b784-193671541310 req-110561a6-8d81-4a73-a564-ae35425a7384 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] No waiting events found dispatching network-vif-plugged-673805d6-777f-43ed-ba10-6cf3962a0a45 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1484.647192] env[62346]: WARNING nova.compute.manager [req-efaa4c5c-3725-403f-b784-193671541310 req-110561a6-8d81-4a73-a564-ae35425a7384 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Received unexpected event network-vif-plugged-673805d6-777f-43ed-ba10-6cf3962a0a45 for instance with vm_state building and task_state spawning. [ 1484.897853] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Successfully updated port: 673805d6-777f-43ed-ba10-6cf3962a0a45 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1484.916820] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "refresh_cache-f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.916969] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "refresh_cache-f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.917133] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1484.975138] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1485.207850] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Updating instance_info_cache with network_info: [{"id": "673805d6-777f-43ed-ba10-6cf3962a0a45", "address": "fa:16:3e:ec:ea:55", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap673805d6-77", "ovs_interfaceid": "673805d6-777f-43ed-ba10-6cf3962a0a45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.220474] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "refresh_cache-f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.220823] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Instance network_info: |[{"id": "673805d6-777f-43ed-ba10-6cf3962a0a45", "address": "fa:16:3e:ec:ea:55", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap673805d6-77", "ovs_interfaceid": "673805d6-777f-43ed-ba10-6cf3962a0a45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1485.221262] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:ea:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '673805d6-777f-43ed-ba10-6cf3962a0a45', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1485.229445] env[62346]: DEBUG oslo.service.loopingcall [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1485.229975] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1485.230230] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e5ec68d-3b47-4650-823b-ab1e33ba4e0f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.253283] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1485.253283] env[62346]: value = "task-4891734" [ 1485.253283] env[62346]: _type = "Task" [ 1485.253283] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.262281] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891734, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.763962] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891734, 'name': CreateVM_Task} progress is 99%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.265301] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891734, 'name': CreateVM_Task} progress is 99%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.676916] env[62346]: DEBUG nova.compute.manager [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Received event network-changed-673805d6-777f-43ed-ba10-6cf3962a0a45 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1486.676916] env[62346]: DEBUG nova.compute.manager [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Refreshing instance network info cache due to event network-changed-673805d6-777f-43ed-ba10-6cf3962a0a45. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1486.677068] env[62346]: DEBUG oslo_concurrency.lockutils [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] Acquiring lock "refresh_cache-f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.677186] env[62346]: DEBUG oslo_concurrency.lockutils [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] Acquired lock "refresh_cache-f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.677266] env[62346]: DEBUG nova.network.neutron [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Refreshing network info cache for port 673805d6-777f-43ed-ba10-6cf3962a0a45 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1486.767589] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891734, 'name': CreateVM_Task, 'duration_secs': 1.344863} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.767772] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1486.768574] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.768788] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.769164] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1486.769424] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14039d29-3d20-4c8a-be02-8dd624de6cbb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.776421] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1486.776421] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ca37b6-2756-ab7e-0894-86957b543371" [ 1486.776421] env[62346]: _type = "Task" [ 1486.776421] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.785198] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ca37b6-2756-ab7e-0894-86957b543371, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.002578] env[62346]: DEBUG nova.network.neutron [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Updated VIF entry in instance network info cache for port 673805d6-777f-43ed-ba10-6cf3962a0a45. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1487.002946] env[62346]: DEBUG nova.network.neutron [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Updating instance_info_cache with network_info: [{"id": "673805d6-777f-43ed-ba10-6cf3962a0a45", "address": "fa:16:3e:ec:ea:55", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap673805d6-77", "ovs_interfaceid": "673805d6-777f-43ed-ba10-6cf3962a0a45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.016520] env[62346]: DEBUG oslo_concurrency.lockutils [req-99745f6f-9f9a-4ed7-be63-af018e3799c7 req-7f786751-1502-48f9-bc58-c636c513b160 service nova] Releasing lock "refresh_cache-f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.288730] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.289149] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1487.289253] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.834398] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.224981] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "21a988a5-43cc-44f8-97f4-01c5442b6303" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.225309] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.532853] env[62346]: WARNING oslo_vmware.rw_handles [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1527.532853] env[62346]: ERROR oslo_vmware.rw_handles [ 1527.533723] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 
tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1527.535661] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1527.535992] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Copying Virtual Disk [datastore2] vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/b92e709f-2148-4e78-96d0-d1291749e3e8/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1527.536419] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b7f2ba6-59b4-4a91-a2c8-1130d6d012af {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.547890] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Waiting for the task: (returnval){ [ 1527.547890] env[62346]: value = "task-4891735" [ 1527.547890] env[62346]: _type = "Task" [ 1527.547890] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.556466] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Task: {'id': task-4891735, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.059768] env[62346]: DEBUG oslo_vmware.exceptions [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1528.060041] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.060601] env[62346]: ERROR nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1528.060601] env[62346]: Faults: ['InvalidArgument'] [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Traceback (most recent call last): [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] yield resources [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self.driver.spawn(context, instance, image_meta, [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self._fetch_image_if_missing(context, vi) [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] image_cache(vi, tmp_image_ds_loc) [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] vm_util.copy_virtual_disk( [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] session._wait_for_task(vmdk_copy_task) [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 
1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] return self.wait_for_task(task_ref) [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] return evt.wait() [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] result = hub.switch() [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] return self.greenlet.switch() [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self.f(*self.args, **self.kw) [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] raise exceptions.translate_fault(task_info.error) [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Faults: ['InvalidArgument'] [ 1528.060601] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] [ 1528.061481] env[62346]: INFO nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Terminating instance [ 1528.062522] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.062726] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1528.062964] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-50347feb-a85a-4c4d-8508-15aef9d163bb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.065247] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1528.065432] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1528.066195] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab20114-d2af-4157-b74a-261db7f794da {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.073280] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1528.073501] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b1244fe-93b8-4286-8784-bc1af9408bd3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.075790] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1528.075963] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1528.076978] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54fe20c8-c037-4078-b208-644203ee0390 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.081908] env[62346]: DEBUG oslo_vmware.api [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for the task: (returnval){ [ 1528.081908] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5291d5a1-2452-e116-811d-87b939fe53b2" [ 1528.081908] env[62346]: _type = "Task" [ 1528.081908] env[62346]: } to complete. 
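The CopyVirtualDisk_Task failure above also shows how oslo.vmware turns VIM faults into Python exceptions: get_fault_class finds no specific class registered for 'InvalidArgument' ("Fault InvalidArgument not matched"), so the poller falls back to the generic VimFaultException carrying the fault list and the server's message. A minimal sketch of that dispatch, using an illustrative registry rather than oslo.vmware's real one:

    # Hedged sketch of fault-name dispatch; names and registry contents are
    # illustrative, not oslo_vmware.exceptions' actual implementation.
    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    # Specific fault classes would be registered here; faults without an
    # entry fall back to the generic wrapper (hypothetical, empty for brevity).
    _FAULT_CLASSES = {}

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            # Matches the log line "Fault InvalidArgument not matched":
            # no dedicated exception class, so wrap generically.
            return VimFaultException([fault_name], message)
        return cls(message)

    # translate_fault('InvalidArgument',
    #                 'A specified parameter was not correct: fileType')
    # reproduces the exception text seen in the traceback above.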
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.095613] env[62346]: DEBUG oslo_vmware.api [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5291d5a1-2452-e116-811d-87b939fe53b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.141332] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1528.141592] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1528.141723] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Deleting the datastore file [datastore2] 1c3a2024-f6c0-4f6f-86a6-af5debee0479 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1528.141998] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-864e5824-54b6-4be0-a732-124637eec12e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.149442] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Waiting for the task: (returnval){ [ 1528.149442] env[62346]: value = "task-4891737" [ 1528.149442] env[62346]: _type = "Task" [ 1528.149442] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.157748] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Task: {'id': task-4891737, 'name': DeleteDatastoreFile_Task} progress is 0%. 
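Nearly every step in this log is serialized on a named oslo.concurrency lock, with the waited/held times recorded: "refresh_cache-<uuid>" for the network info cache, "compute_resources" for the resource tracker, and the cached VMDK path for image fetches. A sketch of that idiom follows; lockutils.lock is the real context manager behind the Acquiring/Acquired/Releasing lines, while fetch and is_cached are hypothetical helpers standing in for the download and datastore-search steps:

    from oslo_concurrency import lockutils

    # Lock name taken verbatim from the log above.
    CACHE_LOCK = ('[datastore2] devstack-image-cache_base/'
                  '9feb52a6-5366-4257-bc23-471887ce1370/'
                  '9feb52a6-5366-4257-bc23-471887ce1370.vmdk')

    def fetch_image_if_missing(fetch, is_cached):
        # Process-local lock (external=False by default), matching the log
        # entries that carry no lock-file prefix. Only one worker downloads
        # the image; concurrent builds block here, then reuse the cache.
        with lockutils.lock(CACHE_LOCK):
            if not is_cached():
                fetch()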
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.219478] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.232193] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.232520] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.232694] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.232855] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1528.234026] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b8c9f1-aa43-4c52-886b-07272f341a98 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.243047] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62a69db-142d-404f-9b43-bec22b27b87b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.257803] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6815d0b0-6c9d-4328-8d59-6b2b77dcbc06 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.264936] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a128e4b2-9f9c-4855-bbae-5f31d2130b44 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.294246] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180573MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1528.294441] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.294622] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.370605] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.370769] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 045a7d28-8706-4818-be5f-20c03831686e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.370895] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.371043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.371186] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.371306] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.371422] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.371542] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.371641] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.371751] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1528.385588] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1528.396263] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1528.406686] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1528.417754] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1528.428588] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1528.428820] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1528.428980] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '71', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_114b0cf5aadd493e9496f1c436aede91': '1', 'io_workload': '10', 'num_proj_7d9801d7e83545239af34201cc557278': '1', 'num_proj_4bf15abf99ca484a9e823e9cf6595cd3': '1', 'num_proj_ed41fdcbed524645bc79ee368edf832b': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_proj_f059d4d596ee4d2abf7190a5806dd848': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_718112c25d784fc1aa3f11916d691658': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1528.595065] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1528.595402] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Creating directory with path [datastore2] vmware_temp/f5c0e005-36da-4b98-8916-ba68d652b742/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1528.595775] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdbd09c8-acd9-4c27-a2aa-af0ace08e61f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.610069] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Created directory with path [datastore2] vmware_temp/f5c0e005-36da-4b98-8916-ba68d652b742/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1528.610333] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 
tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Fetch image to [datastore2] vmware_temp/f5c0e005-36da-4b98-8916-ba68d652b742/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1528.611029] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/f5c0e005-36da-4b98-8916-ba68d652b742/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1528.611478] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40207da8-b8e6-4e5a-9e2c-8478447d3f7a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.620534] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09936f13-b5a9-41fe-a1b7-84a012e697a0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.623486] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bea558-3644-43f4-91c4-a33c648972ec {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.635035] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864c457e-2d36-4579-b5b7-361ff62805d2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.639534] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3deb97-94c1-4dd0-b3fa-61ce627fdd00 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.697363] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f65fbec-26e5-4cf2-bcc9-6959194234bd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.700699] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546d9add-8cb9-4891-9ef2-4f4f7c238847 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.710610] env[62346]: DEBUG oslo_vmware.api [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Task: {'id': task-4891737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077476} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.713322] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23f4646-9e58-4ef3-b6aa-90b1a687fd06 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.717522] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1528.717722] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1528.717897] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1528.718085] env[62346]: INFO nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Took 0.65 seconds to destroy the instance on the hypervisor. 
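The destroy sequence just completed (UnregisterVM, then DeleteDatastoreFile_Task) follows the same pattern as CreateVM_Task and CopyVirtualDisk_Task earlier: each VIM call returns a Task object that is polled until it reaches 'success' or 'error', with progress logged at every poll. A simplified sketch of that loop, where get_task_info is a hypothetical stand-in for the PropertyCollector read oslo.vmware performs in _poll_task and the interval is an assumption, not the library's default:

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; assumed for the sketch

    class TaskFailed(Exception):
        """Raised when a VIM task ends in the 'error' state."""

    def wait_for_task(get_task_info, task_ref):
        # get_task_info(task_ref) is assumed to return an object with
        # .state ('queued'|'running'|'success'|'error'), .progress, .error.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info  # e.g. the duration_secs values logged above
            if info.state == 'error':
                # At this point the real poller raises the translated fault,
                # producing the VimFaultException tracebacks in this log.
                raise TaskFailed(info.error)
            time.sleep(POLL_INTERVAL)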
[ 1528.720411] env[62346]: DEBUG nova.compute.claims [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1528.720449] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.730726] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a99dd056-2702-4e8e-bb34-750319d6e870 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.732776] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.742687] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1528.758934] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1528.759154] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.464s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.760998] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.040s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.764154] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1528.939790] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.940619] env[62346]: ERROR nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = getattr(controller, method)(*args, **kwargs) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._get(image_id) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] resp, body = self.http_client.get(url, headers=header) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.request(url, 'GET', **kwargs) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._handle_response(resp) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exc.from_response(resp, resp.content) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] During handling of the above exception, another exception occurred: [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] yield resources [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self.driver.spawn(context, instance, image_meta, [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._fetch_image_if_missing(context, vi) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image_fetch(context, vi, tmp_image_ds_loc) [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] images.fetch_image( [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1528.940619] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] metadata = IMAGE_API.get(context, image_ref) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 
045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return session.show(context, image_id, [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] _reraise_translated_image_exception(image_id) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise new_exc.with_traceback(exc_trace) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = getattr(controller, method)(*args, **kwargs) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._get(image_id) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] resp, body = self.http_client.get(url, headers=header) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.request(url, 'GET', **kwargs) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._handle_response(resp) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1528.941621] env[62346]: 
ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exc.from_response(resp, resp.content) [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1528.941621] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1528.941621] env[62346]: INFO nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Terminating instance [ 1528.942691] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.942910] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1528.943181] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44289e70-78b8-4dc8-9d07-f3e4a28888ac {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.946419] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1528.946614] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1528.947430] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b479828a-3ae9-43b3-849d-6a0facba4ee6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.952490] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1528.952662] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1528.957563] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cfe354d-e939-49e8-8ea9-e02657972736 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.959849] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1528.960301] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42034e76-5b87-48c9-b20a-67ac274fccbc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.965036] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Waiting for the task: (returnval){ [ 1528.965036] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d84d0d-521c-2442-3622-84091f4cd0c8" [ 1528.965036] env[62346]: _type = "Task" [ 1528.965036] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.973381] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d84d0d-521c-2442-3622-84091f4cd0c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.024401] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9035f745-2d6f-41d7-9284-e4054fa51f87 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.033317] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0b0332-4369-43ff-a9fd-62616570b3b6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.037899] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1529.038136] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1529.038323] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Deleting the datastore file [datastore2] 045a7d28-8706-4818-be5f-20c03831686e {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1529.038962] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae606735-9379-4c9c-9a8b-edbc40ed4fa9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.068667] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f57bd1-fd54-4e5a-afdd-e8fbbdbb941c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.071778] env[62346]: DEBUG oslo_vmware.api [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for the task: (returnval){ [ 1529.071778] env[62346]: value = "task-4891739" [ 1529.071778] env[62346]: _type = "Task" [ 1529.071778] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.078457] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa5a606-9b25-414f-b697-966e2e54d261 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.084773] env[62346]: DEBUG oslo_vmware.api [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': task-4891739, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.094406] env[62346]: DEBUG nova.compute.provider_tree [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1529.104751] env[62346]: DEBUG nova.scheduler.client.report [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1529.121112] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.360s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.121673] env[62346]: ERROR nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1529.121673] env[62346]: Faults: ['InvalidArgument'] [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Traceback (most recent call last): [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self.driver.spawn(context, instance, image_meta, [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self._fetch_image_if_missing(context, vi) [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] image_cache(vi, tmp_image_ds_loc) [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] vm_util.copy_virtual_disk( [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] session._wait_for_task(vmdk_copy_task) [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] return self.wait_for_task(task_ref) [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] return evt.wait() [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] result = hub.switch() [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] return self.greenlet.switch() [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] self.f(*self.args, **self.kw) [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] raise exceptions.translate_fault(task_info.error) [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Faults: ['InvalidArgument'] [ 1529.121673] env[62346]: ERROR nova.compute.manager [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] [ 1529.122498] env[62346]: DEBUG nova.compute.utils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] VimFaultException 
{{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1529.124059] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Build of instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 was re-scheduled: A specified parameter was not correct: fileType [ 1529.124059] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1529.124483] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1529.124661] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1529.124865] env[62346]: DEBUG nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1529.125250] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1529.483896] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1529.485173] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Creating directory with path [datastore2] vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1529.489194] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32ffa835-2e9f-41ae-b8be-978ff1d8847d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.492179] env[62346]: DEBUG nova.network.neutron [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 
1c3a2024-f6c0-4f6f-86a6-af5debee0479] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.504971] env[62346]: INFO nova.compute.manager [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Took 0.38 seconds to deallocate network for instance. [ 1529.510514] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Created directory with path [datastore2] vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1529.510723] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Fetch image to [datastore2] vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1529.510896] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1529.511943] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2f0399-b7c4-4465-84f1-649379ecb8d6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.523336] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45853981-8466-4c20-8539-4362ed723703 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.536500] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763cd3a6-2dad-4ad3-b371-5fabfbbda868 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.573665] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bf699c-3dcb-40a1-88ee-7c7a181192d4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.589191] env[62346]: DEBUG oslo_vmware.api [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Task: {'id': task-4891739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067573} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.591093] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1529.591311] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1529.591490] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1529.591667] env[62346]: INFO nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1529.594042] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9daa4465-7de6-404f-a5b7-ad2cd966572e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.597027] env[62346]: DEBUG nova.compute.claims [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1529.597027] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.597027] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.630041] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1529.650969] env[62346]: INFO nova.scheduler.client.report [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Deleted 
allocations for instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 [ 1529.705935] env[62346]: DEBUG oslo_vmware.rw_handles [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1529.710746] env[62346]: DEBUG oslo_concurrency.lockutils [None req-18f66e81-c6ae-4ec7-b732-e23f6ca96223 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 584.483s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.713011] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 398.034s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.717026] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1529.717026] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.766731] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.617s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.767030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Acquiring lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.767261] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.767514] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.770038] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.774714] env[62346]: INFO nova.compute.manager [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Terminating instance [ 1529.775871] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1529.779539] env[62346]: DEBUG nova.compute.manager [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1529.779741] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1529.780889] env[62346]: DEBUG oslo_vmware.rw_handles [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1529.781065] env[62346]: DEBUG oslo_vmware.rw_handles [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1529.781340] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac3278fa-df0a-4432-bb53-d61fbc1adb94 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.791742] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97df9f65-230d-41ff-893e-657284c06e2a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.829037] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c3a2024-f6c0-4f6f-86a6-af5debee0479 could not be found. [ 1529.829660] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1529.829660] env[62346]: INFO nova.compute.manager [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1529.829782] env[62346]: DEBUG oslo.service.loopingcall [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.834846] env[62346]: DEBUG nova.compute.manager [-] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1529.834976] env[62346]: DEBUG nova.network.neutron [-] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1529.853071] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.864855] env[62346]: DEBUG nova.network.neutron [-] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.876667] env[62346]: INFO nova.compute.manager [-] [instance: 1c3a2024-f6c0-4f6f-86a6-af5debee0479] Took 0.04 seconds to deallocate network for instance. [ 1529.949955] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d220183-2d99-48e9-97c0-6bfdcb892403 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.958226] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf8535a-14dc-4907-a2ad-518a36dec1c9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.990136] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f40dfaf-619c-4e80-a53a-93387f3c66de {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.995555] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c1e10656-39ae-4177-8a0d-fd7bdb3d41e7 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385 tempest-FloatingIPsAssociationNegativeTestJSON-1701197385-project-member] Lock "1c3a2024-f6c0-4f6f-86a6-af5debee0479" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.002152] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651af5c3-8954-4c28-bc42-c49fc81f7d74 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.018767] env[62346]: DEBUG nova.compute.provider_tree [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed in 
ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.029574] env[62346]: DEBUG nova.scheduler.client.report [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1530.044919] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.448s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.045656] env[62346]: ERROR nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = getattr(controller, method)(*args, **kwargs) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._get(image_id) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] resp, body = self.http_client.get(url, headers=header) [ 
1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.request(url, 'GET', **kwargs) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._handle_response(resp) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exc.from_response(resp, resp.content) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] During handling of the above exception, another exception occurred: [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self.driver.spawn(context, instance, image_meta, [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._fetch_image_if_missing(context, vi) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image_fetch(context, vi, tmp_image_ds_loc) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1530.045656] env[62346]: ERROR nova.compute.manager 
[instance: 045a7d28-8706-4818-be5f-20c03831686e] images.fetch_image( [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] metadata = IMAGE_API.get(context, image_ref) [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1530.045656] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return session.show(context, image_id, [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] _reraise_translated_image_exception(image_id) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise new_exc.with_traceback(exc_trace) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = getattr(controller, method)(*args, **kwargs) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._get(image_id) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] resp, body = self.http_client.get(url, headers=header) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.request(url, 'GET', **kwargs) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._handle_response(resp) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exc.from_response(resp, resp.content) [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1530.046740] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.046740] env[62346]: DEBUG nova.compute.utils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1530.047834] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.195s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.049760] env[62346]: INFO nova.compute.claims [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1530.056025] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Build of instance 045a7d28-8706-4818-be5f-20c03831686e was re-scheduled: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1530.056025] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1530.056025] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1530.056025] env[62346]: DEBUG nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1530.056025] env[62346]: DEBUG nova.network.neutron [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1530.188388] env[62346]: DEBUG neutronclient.v2_0.client [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62346) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1530.189749] env[62346]: ERROR nova.compute.manager [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = getattr(controller, method)(*args, **kwargs) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._get(image_id) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] resp, body = self.http_client.get(url, headers=header) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.request(url, 'GET', **kwargs) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._handle_response(resp) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exc.from_response(resp, resp.content) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] During handling of the above exception, another exception occurred: [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self.driver.spawn(context, instance, image_meta, [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._fetch_image_if_missing(context, vi) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image_fetch(context, vi, tmp_image_ds_loc) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] images.fetch_image( [ 1530.189749] env[62346]: ERROR 
nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] metadata = IMAGE_API.get(context, image_ref) [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1530.189749] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return session.show(context, image_id, [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] _reraise_translated_image_exception(image_id) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise new_exc.with_traceback(exc_trace) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = getattr(controller, method)(*args, **kwargs) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._get(image_id) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] resp, body = self.http_client.get(url, headers=header) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.request(url, 'GET', **kwargs) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1530.190780] 
env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self._handle_response(resp) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exc.from_response(resp, resp.content) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] During handling of the above exception, another exception occurred: [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._build_and_run_instance(context, instance, image, [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exception.RescheduledException( [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] nova.exception.RescheduledException: Build of instance 045a7d28-8706-4818-be5f-20c03831686e was re-scheduled: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. 
[ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] During handling of the above exception, another exception occurred: [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] exception_handler_v20(status_code, error_body) [ 1530.190780] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise client_exc(message=error_message, [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Neutron server returns request_ids: ['req-f644252d-dec4-427d-94d8-1d786304a8b7'] [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] During handling of the above exception, another exception occurred: [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._deallocate_network(context, instance, requested_networks) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self.network_api.deallocate_for_instance( [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] data = neutron.list_ports(**search_opts) [ 
1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.list('ports', self.ports_path, retrieve_all, [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] for r in self._pagination(collection, path, **params): [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] res = self.get(path, params=params) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.retry_request("GET", action, body=body, [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.do_request(method, action, body=body, [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._handle_fault_response(status_code, replybody, resp) [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exception.Unauthorized() [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] nova.exception.Unauthorized: Not authorized. [ 1530.191726] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.247445] env[62346]: INFO nova.scheduler.client.report [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Deleted allocations for instance 045a7d28-8706-4818-be5f-20c03831686e [ 1530.267168] env[62346]: DEBUG oslo_concurrency.lockutils [None req-504aab58-936d-4ea9-8091-872a0ecc5368 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "045a7d28-8706-4818-be5f-20c03831686e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 583.253s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.268913] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "045a7d28-8706-4818-be5f-20c03831686e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 387.771s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.269226] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Acquiring lock "045a7d28-8706-4818-be5f-20c03831686e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.269478] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "045a7d28-8706-4818-be5f-20c03831686e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.269651] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "045a7d28-8706-4818-be5f-20c03831686e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.274468] env[62346]: INFO nova.compute.manager [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e]
Terminating instance [ 1530.276320] env[62346]: DEBUG nova.compute.manager [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1530.276561] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1530.277017] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45c0d463-aaab-4500-822e-679a31375ddb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.286399] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d67df4b-db0c-441b-b95b-0cfbfab3f895 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.300799] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1530.326436] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 045a7d28-8706-4818-be5f-20c03831686e could not be found. [ 1530.326661] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1530.326843] env[62346]: INFO nova.compute.manager [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1530.327320] env[62346]: DEBUG oslo.service.loopingcall [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1530.327378] env[62346]: DEBUG nova.compute.manager [-] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1530.327464] env[62346]: DEBUG nova.network.neutron [-] [instance: 045a7d28-8706-4818-be5f-20c03831686e] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1530.358226] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.364444] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa5c7ca-3a80-41a9-b94b-c5858a9cf0f6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.372509] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fdba35-db1f-428f-a6b9-b01e417e2129 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.404834] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a22395-49d5-4ca0-872c-f0b08b98d401 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.413166] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f530d7-3bc1-4d89-9ec4-0c4f5e7b9846 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.426987] env[62346]: DEBUG nova.compute.provider_tree [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.435335] env[62346]: DEBUG nova.scheduler.client.report [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1530.443267] env[62346]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62346) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1530.443499] env[62346]: ERROR nova.network.neutron [-] Neutron 
client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-c11627d5-4de1-447e-8639-c613dc1281db'] [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args,
**kwargs) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1530.444014] env[62346]: ERROR oslo.service.loopingcall [ 1530.445533] env[62346]: ERROR nova.compute.manager [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
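For context on the NeutronAdminCredentialConfigurationInvalid records above: the earlier ERROR explicitly asks to "verify Neutron admin credential located in nova.conf", i.e. the service credentials Nova uses when calling Neutron on its own behalf. Below is a minimal sketch of the relevant nova.conf [neutron] section, assuming standard Keystone password authentication; the auth_url host and every credential value are placeholders, not values taken from this log:

[neutron]
# Keystone service credentials Nova presents to Neutron.
auth_type = password
auth_url = http://controller:5000/v3
username = nova
password = SERVICE_PASSWORD
project_name = service
user_domain_name = Default
project_domain_name = Default
region_name = RegionOne

A persistent 401 like the one in these tracebacks typically means these values are missing or stale, or that the token they produced has been invalidated on the Keystone side.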
[ 1530.451295] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.403s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.451758] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1530.453949] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.096s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.455283] env[62346]: INFO nova.compute.claims [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1530.486287] env[62346]: ERROR nova.compute.manager [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] exception_handler_v20(status_code, error_body) [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise client_exc(message=error_message, [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Neutron server returns request_ids: ['req-c11627d5-4de1-447e-8639-c613dc1281db'] [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] During handling of the above exception, another exception occurred: [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] Traceback (most recent call last): [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._delete_instance(context, instance, bdms) [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._shutdown_instance(context, instance, bdms) [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._try_deallocate_network(context, instance, requested_networks) [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] with excutils.save_and_reraise_exception(): [ 1530.486287] env[62346]: ERROR 
nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self.force_reraise() [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise self.value [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] _deallocate_network_with_retries() [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return evt.wait() [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = hub.switch() [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.greenlet.switch() [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = func(*self.args, **self.kw) [ 1530.486287] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] result = f(*args, **kwargs) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._deallocate_network( [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self.network_api.deallocate_for_instance( [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 
045a7d28-8706-4818-be5f-20c03831686e] data = neutron.list_ports(**search_opts) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.list('ports', self.ports_path, retrieve_all, [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] for r in self._pagination(collection, path, **params): [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] res = self.get(path, params=params) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.retry_request("GET", action, body=body, [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] return self.do_request(method, action, body=body, [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] ret = obj(*args, **kwargs) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] self._handle_fault_response(status_code, replybody, resp) [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1530.487814] env[62346]: ERROR nova.compute.manager [instance: 045a7d28-8706-4818-be5f-20c03831686e] [ 1530.489746] env[62346]: DEBUG nova.compute.utils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1530.491080] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1530.491248] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1530.516614] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Lock "045a7d28-8706-4818-be5f-20c03831686e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.248s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.526183] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1530.571111] env[62346]: INFO nova.compute.manager [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] [instance: 045a7d28-8706-4818-be5f-20c03831686e] Successfully reverted task state from None on failure for instance. [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server [None req-1308a261-bc49-4f26-989e-ff57c23f9b43 tempest-MigrationsAdminTest-1316875804 tempest-MigrationsAdminTest-1316875804-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-c11627d5-4de1-447e-8639-c613dc1281db'] [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1530.574527] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.575934] env[62346]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1530.575934] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1530.577415] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1530.577415] env[62346]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1530.577415] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1530.577415] env[62346]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1530.577415] env[62346]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1530.577415] env[62346]: ERROR oslo_messaging.rpc.server [ 1530.588714] env[62346]: DEBUG nova.policy [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3308ee81ea534031a035a5f776168662', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d571ab102004368b9265ca62b137356', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1530.600910] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1530.627449] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1530.627703] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1530.627860] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1530.628051] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1530.628197] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1530.628337] env[62346]: DEBUG nova.virt.hardware 
[ 1530.628713] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1530.628803] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1530.628907] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1530.629229] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1530.629385] env[62346]: DEBUG nova.virt.hardware [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1530.630257] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a6393b-cd5c-4897-a88e-0803fad93559 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1530.638974] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650b23dd-c431-418e-b06d-553e5609a8b7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1530.766604] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b15559d-4d8f-4d9e-90d0-5dd823df923b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1530.775821] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754903bc-0c24-4855-9f49-7cf3f62f8498 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1530.808495] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efde06fc-1918-4051-af5c-fccbed2fa926 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1530.816913] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584d4c09-f327-4ee2-9c58-7743226f8ecf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
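
    # Aside (not part of the log): the topology walk above -- 1 vCPU against
    # 65536/65536/65536 limits collapsing to a single (1,1,1) candidate -- can
    # be reproduced with a small stand-alone sketch. This is an illustrative
    # model only, not nova.virt.hardware itself; the function name and the
    # brute-force search are assumptions.
    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is exactly
        # the vCPU count and which respect the flavor/image limits logged above.
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if (sockets * cores * threads == vcpus
                    and sockets <= max_sockets
                    and cores <= max_cores
                    and threads <= max_threads):
                yield sockets, cores, threads

    # Matches "Got 1 possible topologies ... [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
    print(list(possible_topologies(1)))  # [(1, 1, 1)]
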
[ 1530.831528] env[62346]: DEBUG nova.compute.provider_tree [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1530.842558] env[62346]: DEBUG nova.scheduler.client.report [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1530.858776] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1530.859311] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1530.907023] env[62346]: DEBUG nova.compute.utils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1530.908059] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1530.908402] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1530.918582] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
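
    # Aside (not part of the log): the two "Inventory has not changed" lines
    # above come from an equality short-circuit -- the resource tracker
    # recomputes the provider's inventory each period and only calls out to
    # placement when the dict differs from the cached copy. A hedged sketch of
    # that shape; the helper and cache are hypothetical, values are from the log.
    cached_inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def maybe_update_inventory(provider_id, fresh_inventory):
        # Skip the REST call to placement when nothing changed since last period.
        if fresh_inventory == cached_inventory:
            print(f"Inventory has not changed for provider {provider_id}")
            return False
        cached_inventory.update(fresh_inventory)
        return True

    maybe_update_inventory('50caa86a-fe85-4e00-831f-9ba6f7fe3d1c', dict(cached_inventory))
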
[ 1530.986129] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1531.003434] env[62346]: DEBUG nova.policy [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b68a54af151441e6b6853c5502518db8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5afa33f3f2b94e68a5161002a9718f78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1531.014549] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1531.014889] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1531.015117] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1531.015370] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1531.015588] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1531.015794] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
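
    # Aside (not part of the log): both "Policy check ... failed" DEBUG lines
    # are expected for these tempest credentials -- the caller holds only
    # reader/member roles while network:attach_external_network appears to be
    # gated by an admin-style rule, so the check fails and the build proceeds
    # on a tenant network. A toy stand-in for the oslo.policy evaluation; the
    # rule shown is an assumption about the default, not the deployment's file.
    def authorize(rule, credentials):
        rules = {'network:attach_external_network':
                 lambda c: c.get('is_admin', False)}
        return rules[rule](credentials)

    creds = {'is_admin': False, 'roles': ['reader', 'member']}
    print(authorize('network:attach_external_network', creds))  # False -> "failed"
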
[ 1531.016067] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1531.016360] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1531.016596] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1531.016818] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1531.017061] env[62346]: DEBUG nova.virt.hardware [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1531.017967] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfe0b45-2c3d-40d4-b8e3-13581ce23f00 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1531.026723] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334c40a7-93b7-4527-90a6-e4f7f5b1ac99 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1531.165574] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Successfully created port: 01c8f82c-172c-4d68-aac4-def960966960 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1531.219425] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1531.219611] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 1531.219734] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 1531.244575] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.244780] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.244847] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.245223] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.245485] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.245651] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.245802] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.245952] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.246112] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.246311] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1531.246467] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}}
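
    # Aside (not part of the log): the block of "Skipping network cache update
    # ... Building." lines is the periodic healer filtering out instances whose
    # builds are still in flight; with every instance on this host in that
    # state, it ends with the "Didn't find any instances" line. A simplified
    # sketch of that filter; types and names are stand-ins for
    # ComputeManager._heal_instance_info_cache.
    BUILDING = 'building'

    instances = {
        'b8a61eab-25ca-413b-9a01-81bf2ac37cc2': BUILDING,
        'daef9d9c-03a6-4ee8-9806-9d895f802776': BUILDING,
    }

    candidates = []
    for uuid, vm_state in instances.items():
        if vm_state == BUILDING:
            print(f"[instance: {uuid}] Skipping network cache update because it is Building.")
            continue
        candidates.append(uuid)

    if not candidates:
        print("Didn't find any instances for network info cache update.")
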
[ 1531.247044] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1531.247303] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1531.247475] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}}
[ 1531.603089] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Successfully created port: 3d3286dc-84aa-46dc-aba0-8c68e135e43f {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1532.220623] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1532.220893] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1532.451178] env[62346]: DEBUG nova.compute.manager [req-43ab56c0-6eb7-4d02-97f2-3d68a6b65fb0 req-ceac8dc5-6f8c-461a-a965-4f00e6632333 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Received event network-vif-plugged-01c8f82c-172c-4d68-aac4-def960966960 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1532.451555] env[62346]: DEBUG oslo_concurrency.lockutils [req-43ab56c0-6eb7-4d02-97f2-3d68a6b65fb0 req-ceac8dc5-6f8c-461a-a965-4f00e6632333 service nova] Acquiring lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1532.451847] env[62346]: DEBUG oslo_concurrency.lockutils [req-43ab56c0-6eb7-4d02-97f2-3d68a6b65fb0 req-ceac8dc5-6f8c-461a-a965-4f00e6632333 service nova] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1532.452141] env[62346]: DEBUG oslo_concurrency.lockutils [req-43ab56c0-6eb7-4d02-97f2-3d68a6b65fb0 req-ceac8dc5-6f8c-461a-a965-4f00e6632333 service nova] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1532.452432] env[62346]: DEBUG nova.compute.manager [req-43ab56c0-6eb7-4d02-97f2-3d68a6b65fb0 req-ceac8dc5-6f8c-461a-a965-4f00e6632333 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] No waiting events found dispatching network-vif-plugged-01c8f82c-172c-4d68-aac4-def960966960 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
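
    # Aside (not part of the log): the "-events" lock dance and the "No waiting
    # events found" line reflect a waiter registry keyed by (instance, event):
    # an external event from neutron pops a registered waiter if the driver
    # armed one first, and otherwise falls through to the "unexpected event"
    # warning that follows. A hedged sketch loosely modelled on nova's
    # InstanceEvents; names and structure are simplified.
    import threading

    _events_lock = threading.Lock()
    _waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        with _events_lock:
            ev = _waiters[(instance_uuid, event_name)] = threading.Event()
        return ev

    def pop_instance_event(instance_uuid, event_name):
        with _events_lock:  # mirrors the acquire/release pair logged above
            return _waiters.pop((instance_uuid, event_name), None)

    waiter = pop_instance_event('8979ed84-fa1d-49a1-9f00-844d0b0f604a',
                                'network-vif-plugged-01c8f82c-172c-4d68-aac4-def960966960')
    if waiter is None:
        print("No waiting events found; the unexpected-event warning would follow")
    else:
        waiter.set()
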
[ 1532.452716] env[62346]: WARNING nova.compute.manager [req-43ab56c0-6eb7-4d02-97f2-3d68a6b65fb0 req-ceac8dc5-6f8c-461a-a965-4f00e6632333 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Received unexpected event network-vif-plugged-01c8f82c-172c-4d68-aac4-def960966960 for instance with vm_state building and task_state spawning.
[ 1532.548108] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Successfully updated port: 01c8f82c-172c-4d68-aac4-def960966960 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1532.564123] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "refresh_cache-8979ed84-fa1d-49a1-9f00-844d0b0f604a" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1532.564349] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquired lock "refresh_cache-8979ed84-fa1d-49a1-9f00-844d0b0f604a" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1532.564487] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1532.662198] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1533.056489] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Updating instance_info_cache with network_info: [{"id": "01c8f82c-172c-4d68-aac4-def960966960", "address": "fa:16:3e:8c:9a:c6", "network": {"id": "e73f50aa-01e0-43ec-9b56-be1eab11aca7", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1823950061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d571ab102004368b9265ca62b137356", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01c8f82c-17", "ovs_interfaceid": "01c8f82c-172c-4d68-aac4-def960966960", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1533.071343] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Releasing lock "refresh_cache-8979ed84-fa1d-49a1-9f00-844d0b0f604a" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1533.071659] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Instance network_info: |[{"id": "01c8f82c-172c-4d68-aac4-def960966960", "address": "fa:16:3e:8c:9a:c6", "network": {"id": "e73f50aa-01e0-43ec-9b56-be1eab11aca7", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1823950061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d571ab102004368b9265ca62b137356", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01c8f82c-17", "ovs_interfaceid": "01c8f82c-172c-4d68-aac4-def960966960", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1533.072144] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:9a:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01c8f82c-172c-4d68-aac4-def960966960', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1533.079693] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Creating folder: Project (7d571ab102004368b9265ca62b137356). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1533.080307] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4caa2399-1b1d-4708-b2ad-d864768543c3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1533.092751] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Created folder: Project (7d571ab102004368b9265ca62b137356) in parent group-v953204.
[ 1533.092751] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Creating folder: Instances. Parent ref: group-v953297. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1533.092751] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7012d5d6-f418-4ce3-89ca-bcc00ab9aa71 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1533.099863] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Successfully updated port: 3d3286dc-84aa-46dc-aba0-8c68e135e43f {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1533.106016] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Created folder: Instances in parent group-v953297.
[ 1533.106016] env[62346]: DEBUG oslo.service.loopingcall [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
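
    # Aside (not part of the log): the "Instance VIF info" entry above is a
    # straight projection of fields from the neutron network_info blob logged
    # a few lines earlier. A hedged sketch of that mapping; key names are
    # copied from the log, the helper itself is hypothetical (the real
    # translation lives in the vmwareapi driver).
    def vif_info_from_network_info(vif):
        return {
            'network_name': vif['network']['bridge'],            # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }

    vif = {'id': '01c8f82c-172c-4d68-aac4-def960966960',
           'address': 'fa:16:3e:8c:9a:c6',
           'network': {'bridge': 'br-int'},
           'details': {'nsx-logical-switch-id': '7043ca7a-807c-4c7b-b646-23ffece188b2'}}
    print(vif_info_from_network_info(vif))
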
[ 1533.106016] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1533.106016] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-552ab427-cafc-48de-97f5-31b833827de2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1533.118873] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "refresh_cache-daef9d9c-03a6-4ee8-9806-9d895f802776" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1533.119103] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "refresh_cache-daef9d9c-03a6-4ee8-9806-9d895f802776" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1533.119263] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1533.128397] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1533.128397] env[62346]: value = "task-4891742"
[ 1533.128397] env[62346]: _type = "Task"
[ 1533.128397] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1533.138634] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891742, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1533.189461] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
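
    # Aside (not part of the log): the "Waiting for the task" / "progress is
    # 0%" / "completed successfully" triple is oslo.vmware's poll loop around
    # CreateVM_Task. The sketch below shows the shape of such a loop; it is a
    # stand-in, not oslo_vmware.api itself, and the poll callback is assumed.
    import time

    def wait_for_task(poll, interval=0.5):
        # poll() is a hypothetical callable returning (state, progress_pct).
        while True:
            state, progress = poll()
            print(f"Task progress is {progress}%.")
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            time.sleep(interval)

    states = iter([('running', 0), ('success', 100)])
    wait_for_task(lambda: next(states), interval=0)  # two polls, like task-4891742 above
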
[ 1533.465142] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Updating instance_info_cache with network_info: [{"id": "3d3286dc-84aa-46dc-aba0-8c68e135e43f", "address": "fa:16:3e:51:09:2d", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d3286dc-84", "ovs_interfaceid": "3d3286dc-84aa-46dc-aba0-8c68e135e43f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1533.482549] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "refresh_cache-daef9d9c-03a6-4ee8-9806-9d895f802776" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1533.482868] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Instance network_info: |[{"id": "3d3286dc-84aa-46dc-aba0-8c68e135e43f", "address": "fa:16:3e:51:09:2d", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d3286dc-84", "ovs_interfaceid": "3d3286dc-84aa-46dc-aba0-8c68e135e43f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1533.483625] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:09:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d3286dc-84aa-46dc-aba0-8c68e135e43f', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1533.491408] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating folder: Project (5afa33f3f2b94e68a5161002a9718f78). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1533.491915] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2061e093-926b-4690-a443-236714851664 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1533.502967] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created folder: Project (5afa33f3f2b94e68a5161002a9718f78) in parent group-v953204.
[ 1533.503183] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating folder: Instances. Parent ref: group-v953300. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1533.503416] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d90a331-ce8a-4912-968a-33509c124a40 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1533.513522] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created folder: Instances in parent group-v953300.
[ 1533.513755] env[62346]: DEBUG oslo.service.loopingcall [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1533.513944] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1533.514158] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-766c2e74-63e1-4b28-831d-cb2803c57b6b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1533.534138] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1533.534138] env[62346]: value = "task-4891745"
[ 1533.534138] env[62346]: _type = "Task"
[ 1533.534138] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
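
    # Aside (not part of the log): the paired Folder.CreateFolder calls build
    # the per-tenant hierarchy under the driver's parent folder (group-v953204
    # here): "Project (<project_id>)" then "Instances", and the CreateVM_Task
    # that follows lands inside it. A small hypothetical helper showing the
    # resulting path; folder names are exactly what the INFO lines above log.
    def instance_folder_path(project_id):
        return f"Project ({project_id})/Instances"

    print(instance_folder_path('5afa33f3f2b94e68a5161002a9718f78'))
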
[ 1533.542874] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891745, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1533.637161] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891742, 'name': CreateVM_Task, 'duration_secs': 0.312464} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1533.637366] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1533.638095] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1533.638267] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1533.638613] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1533.638899] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fb4172d-0680-4eb3-93d6-80d485ac9b59 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1533.643952] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Waiting for the task: (returnval){
[ 1533.643952] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52cb42b9-7381-50e8-96fb-ab335b1edefe"
[ 1533.643952] env[62346]: _type = "Task"
[ 1533.643952] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1533.652840] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52cb42b9-7381-50e8-96fb-ab335b1edefe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1534.044188] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891745, 'name': CreateVM_Task, 'duration_secs': 0.308591} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
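
    # Aside (not part of the log): the lock names around SearchDatastore_Task
    # encode the image-cache layout the driver checks before deciding whether
    # to fetch the image -- a per-image directory with a VMDK of the same name.
    # Hedged helpers reproducing those lock strings; the path scheme is read
    # off the log, the functions themselves are assumptions.
    def cache_dir_lock(datastore, image_id):
        return f"[{datastore}] devstack-image-cache_base/{image_id}"

    def cache_vmdk_lock(datastore, image_id):
        return cache_dir_lock(datastore, image_id) + f"/{image_id}.vmdk"

    image = '9feb52a6-5366-4257-bc23-471887ce1370'
    print(cache_dir_lock('datastore2', image))
    print(cache_vmdk_lock('datastore2', image))
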
[ 1534.044563] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1534.045108] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1534.155264] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1534.155540] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1534.155754] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1534.155971] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1534.156431] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1534.156720] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-922df164-3e81-4e32-8ff5-4bdf4e42978c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1534.162066] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){
[ 1534.162066] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5201f019-ddfa-c315-67f1-f162ae0f3331"
[ 1534.162066] env[62346]: _type = "Task"
[ 1534.162066] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1534.170372] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5201f019-ddfa-c315-67f1-f162ae0f3331, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1534.220259] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1534.568696] env[62346]: DEBUG nova.compute.manager [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Received event network-changed-01c8f82c-172c-4d68-aac4-def960966960 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1534.568828] env[62346]: DEBUG nova.compute.manager [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Refreshing instance network info cache due to event network-changed-01c8f82c-172c-4d68-aac4-def960966960. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}}
[ 1534.569015] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Acquiring lock "refresh_cache-8979ed84-fa1d-49a1-9f00-844d0b0f604a" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1534.569173] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Acquired lock "refresh_cache-8979ed84-fa1d-49a1-9f00-844d0b0f604a" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1534.569335] env[62346]: DEBUG nova.network.neutron [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Refreshing network info cache for port 01c8f82c-172c-4d68-aac4-def960966960 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1534.674132] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1534.674132] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1534.674132] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.907906] env[62346]: DEBUG nova.network.neutron [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Updated VIF entry in instance network info cache for port 01c8f82c-172c-4d68-aac4-def960966960. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1534.908284] env[62346]: DEBUG nova.network.neutron [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Updating instance_info_cache with network_info: [{"id": "01c8f82c-172c-4d68-aac4-def960966960", "address": "fa:16:3e:8c:9a:c6", "network": {"id": "e73f50aa-01e0-43ec-9b56-be1eab11aca7", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1823950061-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d571ab102004368b9265ca62b137356", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01c8f82c-17", "ovs_interfaceid": "01c8f82c-172c-4d68-aac4-def960966960", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.918029] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Releasing lock "refresh_cache-8979ed84-fa1d-49a1-9f00-844d0b0f604a" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.918221] env[62346]: DEBUG nova.compute.manager [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Received event network-vif-plugged-3d3286dc-84aa-46dc-aba0-8c68e135e43f {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1534.918414] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Acquiring lock "daef9d9c-03a6-4ee8-9806-9d895f802776-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.918891] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) 
[ 1534.919093] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1534.919264] env[62346]: DEBUG nova.compute.manager [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] No waiting events found dispatching network-vif-plugged-3d3286dc-84aa-46dc-aba0-8c68e135e43f {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1534.919431] env[62346]: WARNING nova.compute.manager [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Received unexpected event network-vif-plugged-3d3286dc-84aa-46dc-aba0-8c68e135e43f for instance with vm_state building and task_state spawning.
[ 1534.919592] env[62346]: DEBUG nova.compute.manager [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Received event network-changed-3d3286dc-84aa-46dc-aba0-8c68e135e43f {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1534.919745] env[62346]: DEBUG nova.compute.manager [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Refreshing instance network info cache due to event network-changed-3d3286dc-84aa-46dc-aba0-8c68e135e43f. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}}
[ 1534.919929] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Acquiring lock "refresh_cache-daef9d9c-03a6-4ee8-9806-9d895f802776" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1534.920074] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Acquired lock "refresh_cache-daef9d9c-03a6-4ee8-9806-9d895f802776" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1534.920230] env[62346]: DEBUG nova.network.neutron [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Refreshing network info cache for port 3d3286dc-84aa-46dc-aba0-8c68e135e43f {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1535.212109] env[62346]: DEBUG nova.network.neutron [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Updated VIF entry in instance network info cache for port 3d3286dc-84aa-46dc-aba0-8c68e135e43f. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1535.212512] env[62346]: DEBUG nova.network.neutron [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Updating instance_info_cache with network_info: [{"id": "3d3286dc-84aa-46dc-aba0-8c68e135e43f", "address": "fa:16:3e:51:09:2d", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d3286dc-84", "ovs_interfaceid": "3d3286dc-84aa-46dc-aba0-8c68e135e43f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1535.223464] env[62346]: DEBUG oslo_concurrency.lockutils [req-444fc44b-3fb7-478e-baeb-444fffcb9606 req-7e4e21b6-b16a-4ac1-a5a4-7e69ecb567f2 service nova] Releasing lock "refresh_cache-daef9d9c-03a6-4ee8-9806-9d895f802776" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1540.220648] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1540.383795] env[62346]: DEBUG oslo_concurrency.lockutils [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1576.126840] env[62346]: WARNING oslo_vmware.rw_handles [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles response.begin()
[ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1576.126840] env[62346]: ERROR oslo_vmware.rw_handles [ 1576.127490] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1576.130139] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1576.130139] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Copying Virtual Disk [datastore2] vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/45a86f1c-f87b-4fc3-85bc-7bbbaa58a1aa/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1576.130139] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73c9983f-6bd2-44a8-aa4e-39a147ff9f88 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.138696] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Waiting for the task: (returnval){ [ 1576.138696] env[62346]: value = "task-4891746" [ 1576.138696] env[62346]: _type = "Task" [ 1576.138696] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.147710] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Task: {'id': task-4891746, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.650506] env[62346]: DEBUG oslo_vmware.exceptions [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1576.650793] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.651355] env[62346]: ERROR nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1576.651355] env[62346]: Faults: ['InvalidArgument'] [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Traceback (most recent call last): [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] yield resources [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self.driver.spawn(context, instance, image_meta, [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self._fetch_image_if_missing(context, vi) [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] image_cache(vi, tmp_image_ds_loc) [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] vm_util.copy_virtual_disk( [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] session._wait_for_task(vmdk_copy_task) [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] return self.wait_for_task(task_ref) [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] return evt.wait() [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] result = hub.switch() [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] return self.greenlet.switch() [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self.f(*self.args, **self.kw) [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] raise exceptions.translate_fault(task_info.error) [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Faults: ['InvalidArgument'] [ 1576.651355] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] [ 1576.652402] env[62346]: INFO nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Terminating instance [ 1576.653291] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.653504] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1576.653740] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e65b4e8a-7c9d-40d9-95b7-17d5ba61573d {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.656103] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.656264] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquired lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.656427] env[62346]: DEBUG nova.network.neutron [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1576.663285] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1576.663452] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1576.664195] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdc9548a-1eb5-4abc-b40b-e17313ceb95d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.672019] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Waiting for the task: (returnval){ [ 1576.672019] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5274606a-8d8f-7a40-77b3-76c4c079fae0" [ 1576.672019] env[62346]: _type = "Task" [ 1576.672019] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.680463] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5274606a-8d8f-7a40-77b3-76c4c079fae0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.686936] env[62346]: DEBUG nova.network.neutron [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1576.755550] env[62346]: DEBUG nova.network.neutron [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.766754] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Releasing lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.767201] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1576.767398] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1576.768519] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03232e4d-d306-42bd-85ca-fd79aef0f487 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.778137] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1576.778370] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9ce0f62-a055-49e8-8db3-640bf5303a03 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.809707] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1576.809920] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1576.810120] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Deleting the datastore file [datastore2] b8a61eab-25ca-413b-9a01-81bf2ac37cc2 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1576.810382] 
env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c8e1db0-5552-4ef5-8ab0-feb63c0cff26 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.817562] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Waiting for the task: (returnval){ [ 1576.817562] env[62346]: value = "task-4891748" [ 1576.817562] env[62346]: _type = "Task" [ 1576.817562] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.825285] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Task: {'id': task-4891748, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.182415] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1577.182728] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Creating directory with path [datastore2] vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1577.182903] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad25e898-c705-4f11-8edd-d0d55b1cb2f0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.194996] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Created directory with path [datastore2] vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1577.195183] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Fetch image to [datastore2] vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1577.195357] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 
1577.196136] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d00be8a-3619-4751-ac56-f9380fb3110a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.203380] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f4e0ef-54f0-492e-81da-52b8ea2ba82f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.212746] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91fad98-8d96-4942-b184-5609bc3d968c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.243748] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc652f0-7f4b-4272-8265-15896b7a4d74 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.250223] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-02441707-71ba-49bf-b0b6-87faf1e60f9b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.275165] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1577.329101] env[62346]: DEBUG oslo_vmware.api [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Task: {'id': task-4891748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.059055} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.329468] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1577.329682] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1577.329860] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1577.330049] env[62346]: INFO nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Took 0.56 seconds to destroy the instance on the hypervisor. 
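The DeleteDatastoreFile_Task round trip above (submit, poll at 0%, then "completed successfully" with duration_secs) is the standard oslo.vmware task loop. A minimal sketch of the same flow; the connection values are placeholders and the helper name is hypothetical, not the lab's real endpoint or Nova's code:

    from oslo_vmware import api

    # Placeholder credentials; constructing the session logs in immediately,
    # as the "Logging into host" lines earlier in this log show.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def delete_datastore_file(path, datacenter_ref):
        # FileManager.DeleteDatastoreFile_Task returns a task reference;
        # wait_for_task() polls it (the "progress is 0%" lines) until it
        # reaches 'success' or raises the translated fault.
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task',
            session.vim.service_content.fileManager,
            name=path, datacenter=datacenter_ref)
        return session.wait_for_task(task)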
[ 1577.330299] env[62346]: DEBUG oslo.service.loopingcall [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1577.330506] env[62346]: DEBUG nova.compute.manager [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network deallocation for instance since networking was not requested. {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1577.332804] env[62346]: DEBUG nova.compute.claims [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1577.332976] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.333200] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.465866] env[62346]: DEBUG oslo_vmware.rw_handles [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1577.525109] env[62346]: DEBUG oslo_vmware.rw_handles [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1577.525109] env[62346]: DEBUG oslo_vmware.rw_handles [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2.
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1577.599519] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44b833c-c34f-4ea4-ad4c-ef5fb62a6742 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.607100] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3d083c-35ce-48c6-9ccb-0d913fc1b5e5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.638072] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b892d0-d012-48ce-a299-e281065e3d16 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.645321] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a624697-ed22-4cd3-9276-8d6c2440ab0c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.658890] env[62346]: DEBUG nova.compute.provider_tree [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.668231] env[62346]: DEBUG nova.scheduler.client.report [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1577.683892] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.351s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.684472] env[62346]: ERROR nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1577.684472] env[62346]: Faults: ['InvalidArgument'] [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Traceback (most recent call last): [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1577.684472] env[62346]: ERROR nova.compute.manager 
[instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self.driver.spawn(context, instance, image_meta, [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self._fetch_image_if_missing(context, vi) [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] image_cache(vi, tmp_image_ds_loc) [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] vm_util.copy_virtual_disk( [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] session._wait_for_task(vmdk_copy_task) [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] return self.wait_for_task(task_ref) [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] return evt.wait() [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] result = hub.switch() [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] return self.greenlet.switch() [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] self.f(*self.args, **self.kw) [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] raise exceptions.translate_fault(task_info.error) [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Faults: ['InvalidArgument'] [ 1577.684472] env[62346]: ERROR nova.compute.manager [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] [ 1577.685384] env[62346]: DEBUG nova.compute.utils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1577.686743] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Build of instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 was re-scheduled: A specified parameter was not correct: fileType [ 1577.686743] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1577.687136] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1577.687364] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.687513] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquired lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.687672] env[62346]: DEBUG nova.network.neutron [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1577.713435] env[62346]: DEBUG nova.network.neutron [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1577.774159] env[62346]: DEBUG nova.network.neutron [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.783191] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Releasing lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.783555] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1577.783845] env[62346]: DEBUG nova.compute.manager [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Skipping network deallocation for instance since networking was not requested. {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1577.888647] env[62346]: INFO nova.scheduler.client.report [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Deleted allocations for instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 [ 1577.909451] env[62346]: DEBUG oslo_concurrency.lockutils [None req-82721f08-98a5-4006-ba2e-a5833d96defb tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 582.315s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.910593] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 386.378s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.911058] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.911058] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
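The 582-second lock hold above brackets the entire failed build: the InvalidArgument fault raised by _poll_task propagates out of wait_for_task() and triggers the re-schedule seen earlier. A minimal sketch of how such a fault can be caught and inspected with oslo.vmware; session and task_ref are assumed to exist, and singling out InvalidArgument is illustrative rather than Nova's actual handling:

    from oslo_vmware import exceptions as vexc

    def wait_checked(session, task_ref):
        # wait_for_task() re-raises the exception produced by
        # exceptions.translate_fault(); fault_list carries the raw fault
        # names, e.g. ['InvalidArgument'] in the traceback above.
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as err:
            if 'InvalidArgument' in err.fault_list:
                # Nova aborts the resource claim and re-schedules the
                # build; a caller could special-case the fault here.
                raise
            raise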
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.911203] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.913099] env[62346]: INFO nova.compute.manager [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Terminating instance [ 1577.914784] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquiring lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.914940] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Acquired lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.915112] env[62346]: DEBUG nova.network.neutron [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1577.921033] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1577.943635] env[62346]: DEBUG nova.network.neutron [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1577.977252] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.977509] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.979111] env[62346]: INFO nova.compute.claims [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1578.031780] env[62346]: DEBUG nova.network.neutron [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.040984] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Releasing lock "refresh_cache-b8a61eab-25ca-413b-9a01-81bf2ac37cc2" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.041409] env[62346]: DEBUG nova.compute.manager [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1578.041599] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1578.042130] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ed46482-ed8a-4c4f-ac3a-192680081685 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.052201] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb4a21e-8088-44b9-abf2-2f256de84b18 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.087324] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b8a61eab-25ca-413b-9a01-81bf2ac37cc2 could not be found. [ 1578.087422] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1578.087601] env[62346]: INFO nova.compute.manager [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1578.087950] env[62346]: DEBUG oslo.service.loopingcall [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1578.088191] env[62346]: DEBUG nova.compute.manager [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1578.088286] env[62346]: DEBUG nova.network.neutron [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1578.108932] env[62346]: DEBUG nova.network.neutron [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1578.116290] env[62346]: DEBUG nova.network.neutron [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.126114] env[62346]: INFO nova.compute.manager [-] [instance: b8a61eab-25ca-413b-9a01-81bf2ac37cc2] Took 0.04 seconds to deallocate network for instance.
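The "Waiting for function ... _deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery driving a retry loop around network deallocation. A generic sketch of that pattern; deallocate_fn, the attempt count, and the interval are placeholders, not Nova's actual retry policy:

    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate_fn, max_attempts=3):
        attempt = {'n': 0}

        def _try_once():
            attempt['n'] += 1
            try:
                deallocate_fn()
            except Exception:
                if attempt['n'] >= max_attempts:
                    raise
                return  # run again on the next tick
            # Raising LoopingCallDone hands control back to start().wait(),
            # which is what the "Waiting for function ... to return" DEBUG
            # line is blocked on.
            raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        timer.start(interval=1).wait()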
[ 1578.219129] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778c4a94-4fc3-4c7a-9402-d39d10dca44d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.227426] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5216b330-3fe9-4378-b7a8-96fda793d0b5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.264998] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e671c0-13e4-479c-b646-de10ec7b27e0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.268035] env[62346]: DEBUG oslo_concurrency.lockutils [None req-242062b4-538a-4e58-b3d4-ff9ec7991476 tempest-ServerShowV254Test-1955341614 tempest-ServerShowV254Test-1955341614-project-member] Lock "b8a61eab-25ca-413b-9a01-81bf2ac37cc2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.357s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.276874] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ec4ee4-508f-4350-9cf2-5da64ce57f4f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.293560] env[62346]: DEBUG nova.compute.provider_tree [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.301030] env[62346]: DEBUG nova.scheduler.client.report [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1578.317210] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.340s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.317685] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Start building networks asynchronously for instance.
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1578.351767] env[62346]: DEBUG nova.compute.utils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1578.353243] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1578.353413] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1578.364076] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1578.436796] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1578.455922] env[62346]: DEBUG nova.policy [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e89ee3e20804bc0a07e252e6bf8c305', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c344afa6e71c4cc78e746bb53d7d4acc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1578.466399] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1578.466628] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1578.466820] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1578.467023] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1578.467174] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1578.467321] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1578.467550] env[62346]: 
DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1578.467730] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1578.467942] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1578.468121] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1578.468265] env[62346]: DEBUG nova.virt.hardware [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1578.469976] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82d3015-a55b-46fc-9789-e62ca7ebbd84 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.478551] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc679575-d080-485a-8794-eeb90be48be1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.836505] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Successfully created port: 0ede8d21-8473-457f-9619-0e41e712d1a1 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1579.639950] env[62346]: DEBUG nova.compute.manager [req-a2bd28c6-784f-4244-af4d-e62c12780ae3 req-f5918f15-a0d5-4a98-b104-7f188b9e3c98 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Received event network-vif-plugged-0ede8d21-8473-457f-9619-0e41e712d1a1 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1579.640298] env[62346]: DEBUG oslo_concurrency.lockutils [req-a2bd28c6-784f-4244-af4d-e62c12780ae3 req-f5918f15-a0d5-4a98-b104-7f188b9e3c98 service nova] Acquiring lock "5af6a907-80d7-4630-aa01-c600e4908d32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.640418] env[62346]: DEBUG oslo_concurrency.lockutils [req-a2bd28c6-784f-4244-af4d-e62c12780ae3 
req-f5918f15-a0d5-4a98-b104-7f188b9e3c98 service nova] Lock "5af6a907-80d7-4630-aa01-c600e4908d32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.640584] env[62346]: DEBUG oslo_concurrency.lockutils [req-a2bd28c6-784f-4244-af4d-e62c12780ae3 req-f5918f15-a0d5-4a98-b104-7f188b9e3c98 service nova] Lock "5af6a907-80d7-4630-aa01-c600e4908d32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.640780] env[62346]: DEBUG nova.compute.manager [req-a2bd28c6-784f-4244-af4d-e62c12780ae3 req-f5918f15-a0d5-4a98-b104-7f188b9e3c98 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] No waiting events found dispatching network-vif-plugged-0ede8d21-8473-457f-9619-0e41e712d1a1 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1579.640924] env[62346]: WARNING nova.compute.manager [req-a2bd28c6-784f-4244-af4d-e62c12780ae3 req-f5918f15-a0d5-4a98-b104-7f188b9e3c98 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Received unexpected event network-vif-plugged-0ede8d21-8473-457f-9619-0e41e712d1a1 for instance with vm_state building and task_state spawning. [ 1579.814719] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Successfully updated port: 0ede8d21-8473-457f-9619-0e41e712d1a1 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1579.829614] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "refresh_cache-5af6a907-80d7-4630-aa01-c600e4908d32" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.829803] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquired lock "refresh_cache-5af6a907-80d7-4630-aa01-c600e4908d32" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.829918] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1579.889625] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1580.082152] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Updating instance_info_cache with network_info: [{"id": "0ede8d21-8473-457f-9619-0e41e712d1a1", "address": "fa:16:3e:6f:df:ad", "network": {"id": "8e5a6d0e-1dfe-4da6-b5bf-f88b4ab63129", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1860174585-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c344afa6e71c4cc78e746bb53d7d4acc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ede8d21-84", "ovs_interfaceid": "0ede8d21-8473-457f-9619-0e41e712d1a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.096428] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Releasing lock "refresh_cache-5af6a907-80d7-4630-aa01-c600e4908d32" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.096758] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Instance network_info: |[{"id": "0ede8d21-8473-457f-9619-0e41e712d1a1", "address": "fa:16:3e:6f:df:ad", "network": {"id": "8e5a6d0e-1dfe-4da6-b5bf-f88b4ab63129", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1860174585-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c344afa6e71c4cc78e746bb53d7d4acc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ede8d21-84", "ovs_interfaceid": "0ede8d21-8473-457f-9619-0e41e712d1a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1580.097203] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:df:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f1b507ed-cd2d-4c09-9d96-c47bde6a7774', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ede8d21-8473-457f-9619-0e41e712d1a1', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1580.104635] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Creating folder: Project (c344afa6e71c4cc78e746bb53d7d4acc). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1580.105202] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf236a0d-bed5-4cfc-91ef-e232f5f478fb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.117074] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Created folder: Project (c344afa6e71c4cc78e746bb53d7d4acc) in parent group-v953204. [ 1580.117274] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Creating folder: Instances. Parent ref: group-v953303. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1580.117501] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19c29edc-8eb4-4da0-9da5-a0d51d60f802 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.126669] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Created folder: Instances in parent group-v953303. [ 1580.126924] env[62346]: DEBUG oslo.service.loopingcall [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.127133] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1580.127335] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3499a0c-8dba-404b-b767-a51c10089958 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.148284] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1580.148284] env[62346]: value = "task-4891751" [ 1580.148284] env[62346]: _type = "Task" [ 1580.148284] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.158474] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891751, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.658551] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891751, 'name': CreateVM_Task, 'duration_secs': 0.301147} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.658802] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1580.659434] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.659600] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.659922] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1580.660160] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa6ab55-fe65-4049-b61d-fe52a704f942 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.665156] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Waiting for the task: (returnval){ [ 1580.665156] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5233d287-5188-e23e-6235-925d839c8f45" [ 1580.665156] env[62346]: _type = "Task" [ 1580.665156] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.673438] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5233d287-5188-e23e-6235-925d839c8f45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.175960] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.176184] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1581.176398] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.751022] env[62346]: DEBUG nova.compute.manager [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Received event network-changed-0ede8d21-8473-457f-9619-0e41e712d1a1 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1581.751022] env[62346]: DEBUG nova.compute.manager [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Refreshing instance network info cache due to event network-changed-0ede8d21-8473-457f-9619-0e41e712d1a1. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1581.751022] env[62346]: DEBUG oslo_concurrency.lockutils [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] Acquiring lock "refresh_cache-5af6a907-80d7-4630-aa01-c600e4908d32" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.751022] env[62346]: DEBUG oslo_concurrency.lockutils [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] Acquired lock "refresh_cache-5af6a907-80d7-4630-aa01-c600e4908d32" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.751022] env[62346]: DEBUG nova.network.neutron [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Refreshing network info cache for port 0ede8d21-8473-457f-9619-0e41e712d1a1 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1582.126193] env[62346]: DEBUG nova.network.neutron [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Updated VIF entry in instance network info cache for port 0ede8d21-8473-457f-9619-0e41e712d1a1. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1582.126565] env[62346]: DEBUG nova.network.neutron [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Updating instance_info_cache with network_info: [{"id": "0ede8d21-8473-457f-9619-0e41e712d1a1", "address": "fa:16:3e:6f:df:ad", "network": {"id": "8e5a6d0e-1dfe-4da6-b5bf-f88b4ab63129", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1860174585-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c344afa6e71c4cc78e746bb53d7d4acc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ede8d21-84", "ovs_interfaceid": "0ede8d21-8473-457f-9619-0e41e712d1a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.136999] env[62346]: DEBUG oslo_concurrency.lockutils [req-8428c05c-73a9-466c-82ee-eafd7d5b55e0 req-e4ca2d2f-856f-4826-8c73-62320b426a92 service nova] Releasing lock "refresh_cache-5af6a907-80d7-4630-aa01-c600e4908d32" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.219644] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1588.232795] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.234112] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.234112] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.234112] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1588.234839] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5525c3-93ba-4412-b9ea-732d38323334 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.243870] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d8fa12-a4e3-4650-9571-b6462ac406d7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.258483] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a95f57-b75a-4b15-9fee-99124d71d9a8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.265343] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06456dc7-1475-4eae-8f46-728b77f605df {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.296278] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180574MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1588.296485] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.296612] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance da750b9f-b4d7-4c55-acfc-289222af9067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.378043] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.391217] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.407037] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.407037] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1588.407037] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '74', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_ed41fdcbed524645bc79ee368edf832b': '1', 'io_workload': '10', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_proj_f059d4d596ee4d2abf7190a5806dd848': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_718112c25d784fc1aa3f11916d691658': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_7d571ab102004368b9265ca62b137356': '1', 'num_task_spawning': '2', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1588.569793] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26807473-c1c3-475e-a036-d027044f8270 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.579084] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3d5da2-769c-4228-957e-f9ae79a69849 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.612023] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a6cb5b-4791-4c30-8930-d980311cc800 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.620376] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e3ac05-fcd3-4a10-b12e-22f2e570a87b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.634610] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1588.644105] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1588.662366] env[62346]: DEBUG 
nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1588.662572] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.366s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.658556] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.659051] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1592.220132] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1592.220311] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1592.220428] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1592.242741] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.242920] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243065] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243195] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243321] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243441] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243562] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243681] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243797] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.243920] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1592.244049] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1592.244565] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1592.244744] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1592.244880] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1592.414075] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "daef9d9c-03a6-4ee8-9806-9d895f802776" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.221184] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.219746] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.297430] env[62346]: DEBUG oslo_concurrency.lockutils [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "5af6a907-80d7-4630-aa01-c600e4908d32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.217172] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1602.220648] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1626.733787] env[62346]: WARNING oslo_vmware.rw_handles [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1626.733787] env[62346]: ERROR oslo_vmware.rw_handles [ 1626.734412] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1626.736656] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1626.736916] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Copying Virtual Disk [datastore2] vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/388c89ec-f455-490e-ad91-8c3064ee0a94/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1626.737239] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9faf1779-449b-44b5-9227-579d1d8e3343 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.747318] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Waiting for the task: (returnval){ [ 1626.747318] env[62346]: value = "task-4891752" [ 1626.747318] env[62346]: _type = "Task" [ 1626.747318] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.756324] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Task: {'id': task-4891752, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.257599] env[62346]: DEBUG oslo_vmware.exceptions [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1627.257875] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.258455] env[62346]: ERROR nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.258455] env[62346]: Faults: ['InvalidArgument'] [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Traceback (most recent call last): [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] yield resources [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self.driver.spawn(context, instance, image_meta, [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self._fetch_image_if_missing(context, vi) [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] image_cache(vi, tmp_image_ds_loc) [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] vm_util.copy_virtual_disk( [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] session._wait_for_task(vmdk_copy_task) [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] return self.wait_for_task(task_ref) [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] return evt.wait() [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] result = hub.switch() [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] return self.greenlet.switch() [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self.f(*self.args, **self.kw) [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] raise exceptions.translate_fault(task_info.error) [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Faults: ['InvalidArgument'] [ 1627.258455] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] [ 1627.259770] env[62346]: INFO nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Terminating instance [ 1627.260447] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.260652] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1627.260894] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7cd9cde9-d290-44f2-93f6-a998baa39976 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.263145] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1627.263344] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1627.264078] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf3b16a-ddbc-486e-aebb-637011fa23c6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.271891] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1627.272167] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56f072c1-3157-4383-85b1-5ae4850f4b4c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.274358] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1627.274532] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1627.275550] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2a53f53-d092-4003-943c-97a1ff5a1ebd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.280747] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){ [ 1627.280747] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ee4741-1bf6-7c0c-f393-70255ac03712" [ 1627.280747] env[62346]: _type = "Task" [ 1627.280747] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.288325] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ee4741-1bf6-7c0c-f393-70255ac03712, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.343418] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1627.343616] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1627.343798] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Deleting the datastore file [datastore2] da750b9f-b4d7-4c55-acfc-289222af9067 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1627.344072] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f76b6ea-50e7-45f6-a2c6-3ad3dff91f5e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.350724] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Waiting for the task: (returnval){ [ 1627.350724] env[62346]: value = "task-4891754" [ 1627.350724] env[62346]: _type = "Task" [ 1627.350724] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.358799] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Task: {'id': task-4891754, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.791392] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1627.791769] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating directory with path [datastore2] vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1627.791816] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-037325f6-637d-450f-a8da-166ceea8c0c0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.803802] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Created directory with path [datastore2] vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1627.803987] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Fetch image to [datastore2] vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1627.804174] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1627.804909] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470ae29c-fd07-48c0-b818-aada0a474d3a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.812152] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9222a0e7-9330-4f54-ae50-801fa1d637e3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.821720] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4c6ca8-649a-4217-939a-752893fd2903 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.855994] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7cefcb-ebbf-4068-9e4c-37c751dc8a33 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.865658] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-536a1cfc-5709-44c8-a3d7-74d8fd146c94 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.867658] env[62346]: DEBUG oslo_vmware.api [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Task: {'id': task-4891754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081922} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.868263] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1627.868263] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1627.868263] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1627.868419] env[62346]: INFO nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1627.870540] env[62346]: DEBUG nova.compute.claims [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1627.870719] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.870928] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.896832] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1627.968061] env[62346]: DEBUG oslo_vmware.rw_handles [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1628.030871] env[62346]: DEBUG oslo_vmware.rw_handles [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1628.030980] env[62346]: DEBUG oslo_vmware.rw_handles [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1628.153269] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0e549f-0a64-4620-92a9-456d7c0cd67e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.162114] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4b1036-e989-4b2d-a6c5-9ac18832071e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.193124] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66178608-8248-4490-b5d7-2d3f6b59c0bb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.201897] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61f6773-05c4-4abe-8891-8e396a7c7372 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.216611] env[62346]: DEBUG nova.compute.provider_tree [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.227738] env[62346]: DEBUG nova.scheduler.client.report [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1628.245661] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.375s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.246218] env[62346]: ERROR nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1628.246218] env[62346]: Faults: ['InvalidArgument'] [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Traceback (most recent call last): [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1628.246218] env[62346]: ERROR 
nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self.driver.spawn(context, instance, image_meta, [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self._fetch_image_if_missing(context, vi) [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] image_cache(vi, tmp_image_ds_loc) [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] vm_util.copy_virtual_disk( [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] session._wait_for_task(vmdk_copy_task) [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] return self.wait_for_task(task_ref) [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] return evt.wait() [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] result = hub.switch() [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] return self.greenlet.switch() [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] self.f(*self.args, **self.kw) [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] raise exceptions.translate_fault(task_info.error) [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Faults: ['InvalidArgument'] [ 1628.246218] env[62346]: ERROR nova.compute.manager [instance: da750b9f-b4d7-4c55-acfc-289222af9067] [ 1628.247135] env[62346]: DEBUG nova.compute.utils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1628.248615] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Build of instance da750b9f-b4d7-4c55-acfc-289222af9067 was re-scheduled: A specified parameter was not correct: fileType [ 1628.248615] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1628.248998] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1628.249185] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1628.249356] env[62346]: DEBUG nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1628.249523] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1628.656345] env[62346]: DEBUG nova.network.neutron [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.668989] env[62346]: INFO nova.compute.manager [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Took 0.42 seconds to deallocate network for instance. [ 1628.772761] env[62346]: INFO nova.scheduler.client.report [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Deleted allocations for instance da750b9f-b4d7-4c55-acfc-289222af9067 [ 1628.796200] env[62346]: DEBUG oslo_concurrency.lockutils [None req-96242ef9-3df7-438c-98c0-2c3dfb7c7250 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "da750b9f-b4d7-4c55-acfc-289222af9067" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.622s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.797488] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "da750b9f-b4d7-4c55-acfc-289222af9067" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.123s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.797714] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Acquiring lock "da750b9f-b4d7-4c55-acfc-289222af9067-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.797926] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "da750b9f-b4d7-4c55-acfc-289222af9067-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.798181] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "da750b9f-b4d7-4c55-acfc-289222af9067-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.801452] env[62346]: INFO nova.compute.manager [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Terminating instance [ 1628.804100] env[62346]: DEBUG nova.compute.manager [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1628.804100] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1628.804100] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-597df1df-c3f2-44b4-8fdf-a3ad0ffb6892 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.820033] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c88c923-b172-4d13-b55d-c35971a63968 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.839375] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1628.859454] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance da750b9f-b4d7-4c55-acfc-289222af9067 could not be found. [ 1628.860149] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1628.860149] env[62346]: INFO nova.compute.manager [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 1628.861369] env[62346]: DEBUG oslo.service.loopingcall [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.861369] env[62346]: DEBUG nova.compute.manager [-] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1628.861369] env[62346]: DEBUG nova.network.neutron [-] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1628.901623] env[62346]: DEBUG nova.network.neutron [-] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.920881] env[62346]: INFO nova.compute.manager [-] [instance: da750b9f-b4d7-4c55-acfc-289222af9067] Took 0.06 seconds to deallocate network for instance. [ 1628.933987] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.934255] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.935818] env[62346]: INFO nova.compute.claims [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1629.051848] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b3f0bc58-5c69-48c4-8438-7a21ead34320 tempest-MultipleCreateTestJSON-210960969 tempest-MultipleCreateTestJSON-210960969-project-member] Lock "da750b9f-b4d7-4c55-acfc-289222af9067" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.254s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.177747] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68bc889-4979-4bb0-af92-31d75c00c964 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.186115] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae5bd07-4bbc-465e-8b07-f61511514924 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.219135] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295dfb95-79ee-4efd-adbf-f82c543ead3f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.227385] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5bd932-593b-4e0f-885c-9d6f3bdee3cd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.241853] env[62346]: DEBUG nova.compute.provider_tree [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.251282] env[62346]: DEBUG nova.scheduler.client.report [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1629.266888] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.332s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.267467] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1629.304708] env[62346]: DEBUG nova.compute.utils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1629.306347] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Allocating IP information in the background. 
{{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1629.306568] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1629.317202] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1629.370364] env[62346]: DEBUG nova.policy [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c91a168730498c90a31900a69a5d5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f45e49e839f4cafaea598ac8f5fbd2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1629.386542] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1629.413629] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1629.413912] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1629.414159] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1629.414369] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1629.414518] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1629.414669] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1629.414876] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1629.415055] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1629.415230] env[62346]: DEBUG nova.virt.hardware [None 
req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1629.415401] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1629.415570] env[62346]: DEBUG nova.virt.hardware [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1629.416492] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821f80e1-da93-435a-8da4-909a41378f7b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.425897] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e333e12-2da0-43e7-a0dc-d1214e6795f8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.729150] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Successfully created port: 1e25b30e-d300-457f-83ab-303bc7f01457 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1630.693389] env[62346]: DEBUG nova.compute.manager [req-a8c11aba-483a-481f-9cd4-f2257b6e0df4 req-4c1fbfd4-767d-402f-ad3c-a8971176b6bc service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Received event network-vif-plugged-1e25b30e-d300-457f-83ab-303bc7f01457 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1630.693666] env[62346]: DEBUG oslo_concurrency.lockutils [req-a8c11aba-483a-481f-9cd4-f2257b6e0df4 req-4c1fbfd4-767d-402f-ad3c-a8971176b6bc service nova] Acquiring lock "87c6dc89-e89b-4c72-b29c-16751a749d29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.693861] env[62346]: DEBUG oslo_concurrency.lockutils [req-a8c11aba-483a-481f-9cd4-f2257b6e0df4 req-4c1fbfd4-767d-402f-ad3c-a8971176b6bc service nova] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.694019] env[62346]: DEBUG oslo_concurrency.lockutils [req-a8c11aba-483a-481f-9cd4-f2257b6e0df4 req-4c1fbfd4-767d-402f-ad3c-a8971176b6bc service nova] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.694199] env[62346]: DEBUG nova.compute.manager 
[req-a8c11aba-483a-481f-9cd4-f2257b6e0df4 req-4c1fbfd4-767d-402f-ad3c-a8971176b6bc service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] No waiting events found dispatching network-vif-plugged-1e25b30e-d300-457f-83ab-303bc7f01457 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1630.694400] env[62346]: WARNING nova.compute.manager [req-a8c11aba-483a-481f-9cd4-f2257b6e0df4 req-4c1fbfd4-767d-402f-ad3c-a8971176b6bc service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Received unexpected event network-vif-plugged-1e25b30e-d300-457f-83ab-303bc7f01457 for instance with vm_state building and task_state spawning. [ 1630.736378] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Successfully updated port: 1e25b30e-d300-457f-83ab-303bc7f01457 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1630.750157] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "refresh_cache-87c6dc89-e89b-4c72-b29c-16751a749d29" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.750336] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired lock "refresh_cache-87c6dc89-e89b-4c72-b29c-16751a749d29" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.750470] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1630.796997] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1631.022905] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Updating instance_info_cache with network_info: [{"id": "1e25b30e-d300-457f-83ab-303bc7f01457", "address": "fa:16:3e:6c:92:47", "network": {"id": "8fd2ed7f-9f59-475e-acd6-38de7c00c978", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1185369202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f45e49e839f4cafaea598ac8f5fbd2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e25b30e-d3", "ovs_interfaceid": "1e25b30e-d300-457f-83ab-303bc7f01457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.037938] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Releasing lock "refresh_cache-87c6dc89-e89b-4c72-b29c-16751a749d29" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.038368] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Instance network_info: |[{"id": "1e25b30e-d300-457f-83ab-303bc7f01457", "address": "fa:16:3e:6c:92:47", "network": {"id": "8fd2ed7f-9f59-475e-acd6-38de7c00c978", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1185369202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f45e49e839f4cafaea598ac8f5fbd2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e25b30e-d3", "ovs_interfaceid": "1e25b30e-d300-457f-83ab-303bc7f01457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1631.038749] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:92:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e25b30e-d300-457f-83ab-303bc7f01457', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1631.046335] env[62346]: DEBUG oslo.service.loopingcall [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1631.047029] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1631.047205] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14ca60ed-baf7-4a0d-9045-62b72c71a6d0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.068431] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1631.068431] env[62346]: value = "task-4891755" [ 1631.068431] env[62346]: _type = "Task" [ 1631.068431] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.077484] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891755, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.580657] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891755, 'name': CreateVM_Task, 'duration_secs': 0.294961} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.581624] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1631.581624] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.581760] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.582092] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1631.582375] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60f38052-6d39-46eb-96b3-4a04a2b7341e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.587959] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){ [ 1631.587959] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52eed3ab-326e-ffea-3c6c-23a5fd1f776d" [ 1631.587959] env[62346]: _type = "Task" [ 1631.587959] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.596533] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52eed3ab-326e-ffea-3c6c-23a5fd1f776d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.099681] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.100097] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1632.100143] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.720802] env[62346]: DEBUG nova.compute.manager [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Received event network-changed-1e25b30e-d300-457f-83ab-303bc7f01457 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1632.721070] env[62346]: DEBUG nova.compute.manager [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Refreshing instance network info cache due to event network-changed-1e25b30e-d300-457f-83ab-303bc7f01457. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1632.721398] env[62346]: DEBUG oslo_concurrency.lockutils [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] Acquiring lock "refresh_cache-87c6dc89-e89b-4c72-b29c-16751a749d29" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.721579] env[62346]: DEBUG oslo_concurrency.lockutils [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] Acquired lock "refresh_cache-87c6dc89-e89b-4c72-b29c-16751a749d29" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.721744] env[62346]: DEBUG nova.network.neutron [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Refreshing network info cache for port 1e25b30e-d300-457f-83ab-303bc7f01457 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1633.051468] env[62346]: DEBUG nova.network.neutron [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Updated VIF entry in instance network info cache for port 1e25b30e-d300-457f-83ab-303bc7f01457. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1633.051855] env[62346]: DEBUG nova.network.neutron [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Updating instance_info_cache with network_info: [{"id": "1e25b30e-d300-457f-83ab-303bc7f01457", "address": "fa:16:3e:6c:92:47", "network": {"id": "8fd2ed7f-9f59-475e-acd6-38de7c00c978", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1185369202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f45e49e839f4cafaea598ac8f5fbd2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e25b30e-d3", "ovs_interfaceid": "1e25b30e-d300-457f-83ab-303bc7f01457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.062108] env[62346]: DEBUG oslo_concurrency.lockutils [req-9ad3fe81-f9eb-445c-95d4-2f6c0d3bbf44 req-f227feda-9f2c-413e-bf9e-216f0f3dd15a service nova] Releasing lock "refresh_cache-87c6dc89-e89b-4c72-b29c-16751a749d29" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.818158] env[62346]: DEBUG oslo_concurrency.lockutils [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "87c6dc89-e89b-4c72-b29c-16751a749d29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.220144] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.231430] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.231675] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.231926] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.232130] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1650.234933] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae148e5c-d6f6-40bd-9941-3936948ca663 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.244060] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bd36e1-b808-48ed-a98d-8ca8cb2ab8d2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.258957] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45f012c-9b89-4215-ae47-a21dfb763cd7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.265917] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9601940-1f56-447f-8384-42cb62efccf0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.294943] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180535MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1650.295122] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.295297] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.370790] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 88727b37-0f05-4551-ac87-e43385e0f76d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.370949] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371091] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371221] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371348] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371456] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371569] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371680] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371790] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.371899] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.383163] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1650.383390] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1650.383581] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '75', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '2', 'io_workload': '10', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_proj_f059d4d596ee4d2abf7190a5806dd848': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_718112c25d784fc1aa3f11916d691658': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_7d571ab102004368b9265ca62b137356': '1', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1650.520322] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147bc53d-37d2-4b62-a9a3-950c03c6ca7c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.527650] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279b754c-ef03-4251-810a-db415fa0682c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.558313] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466e3055-3e74-459f-bea7-46539d9490d8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.565690] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37ea74f-ee9a-4bfd-8c6e-908a2561188e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.578820] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1650.588062] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1650.601402] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1650.601590] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.306s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.596607] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.597059] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.220715] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.220889] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1652.221016] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1652.245512] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.245677] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.245822] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.245960] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.246099] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.246224] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.246345] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.246464] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.246582] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.246699] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.246822] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1653.220596] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.220861] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.220976] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1654.221372] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.220508] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.222493] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1676.003616] env[62346]: WARNING oslo_vmware.rw_handles [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1676.003616] env[62346]: ERROR oslo_vmware.rw_handles [ 1676.005861] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1676.005958] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1676.006211] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 
tempest-SecurityGroupsTestJSON-252965053-project-member] Copying Virtual Disk [datastore2] vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/341b5d06-e68c-42bc-b700-dada020dc080/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1676.006493] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba2d950f-9cca-4cd3-a72e-5fdf8572d8ba {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.015479] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){ [ 1676.015479] env[62346]: value = "task-4891756" [ 1676.015479] env[62346]: _type = "Task" [ 1676.015479] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.022975] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': task-4891756, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.525170] env[62346]: DEBUG oslo_vmware.exceptions [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1676.525429] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.525968] env[62346]: ERROR nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1676.525968] env[62346]: Faults: ['InvalidArgument'] [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Traceback (most recent call last): [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] yield resources [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self.driver.spawn(context, instance, image_meta, [ 1676.525968] env[62346]: ERROR 
nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self._fetch_image_if_missing(context, vi) [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] image_cache(vi, tmp_image_ds_loc) [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] vm_util.copy_virtual_disk( [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] session._wait_for_task(vmdk_copy_task) [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] return self.wait_for_task(task_ref) [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] return evt.wait() [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] result = hub.switch() [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] return self.greenlet.switch() [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self.f(*self.args, **self.kw) [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 
88727b37-0f05-4551-ac87-e43385e0f76d] raise exceptions.translate_fault(task_info.error) [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Faults: ['InvalidArgument'] [ 1676.525968] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] [ 1676.527363] env[62346]: INFO nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Terminating instance [ 1676.527999] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.528231] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1676.528474] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c40ffc6-4390-4837-8713-ce475f1e4adc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.530685] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1676.530880] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1676.531610] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0253811b-063f-4cfc-8768-157d7e1c71c2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.538613] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1676.538832] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14f3cc3e-aeb9-456f-be38-92a77c7099e4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.541129] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1676.541303] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1676.542256] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8b5862d-bb0d-44c7-9a4e-c6185f747d68 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.546970] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 1676.546970] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f03cdd-ec04-d186-66c6-78df8a1bb14b" [ 1676.546970] env[62346]: _type = "Task" [ 1676.546970] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.558708] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f03cdd-ec04-d186-66c6-78df8a1bb14b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.617251] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1676.617507] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1676.617651] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Deleting the datastore file [datastore2] 88727b37-0f05-4551-ac87-e43385e0f76d {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.617879] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b50a5b9a-7cd1-44bc-b942-8874c90a3396 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.624535] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){ [ 1676.624535] env[62346]: value = "task-4891758" [ 1676.624535] env[62346]: _type = "Task" [ 1676.624535] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.633755] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': task-4891758, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.057969] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1677.058385] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating directory with path [datastore2] vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1677.058458] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b614b5d7-9095-432f-9cbd-8b9226dc5507 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.070423] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created directory with path [datastore2] vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1677.070620] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Fetch image to [datastore2] vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1677.070783] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1677.071542] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558b2648-7300-4d62-beb8-7856e53ec468 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.078490] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c67ba9-6837-4b8a-bb6a-43a0ea5ba97e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.087697] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6eef57-adf9-422f-85ac-7388190c5706 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.117931] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a4096033-0f5e-4214-9787-e2e128d50a94 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.124474] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-61803266-44e8-43e5-a572-a19d3f72404c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.134073] env[62346]: DEBUG oslo_vmware.api [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': task-4891758, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069951} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.134315] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1677.134514] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1677.134687] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1677.134860] env[62346]: INFO nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Took 0.60 seconds to destroy the instance on the hypervisor. 
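
[editor's note] The CopyVirtualDisk_Task failure and the DeleteDatastoreFile_Task cleanup above both surface through the same oslo.vmware polling loop: the driver starts a vCenter task, and wait_for_task() polls TaskInfo (the "Task: {...} progress is 0%" DEBUG lines) until the task succeeds or its fault is translated into a VimFaultException. A minimal sketch of that pattern follows; the vCenter host, credentials, and the helper name delete_datastore_file are placeholders, not values from this deployment — only the call names visible in the log are real.

    # Minimal sketch, assuming a reachable vCenter and valid credentials.
    from oslo_vmware import api, exceptions

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',        # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    def delete_datastore_file(path, datacenter_ref):
        # Same shape as the ds_util.file_delete() call logged above:
        # start FileManager.DeleteDatastoreFile_Task, then block on it.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_manager,
            name=path, datacenter=datacenter_ref)
        try:
            # wait_for_task() polls TaskInfo and emits the
            # "progress is N%" DEBUG lines seen throughout this log.
            session.wait_for_task(task)
        except exceptions.VimFaultException as e:
            # e.fault_list names the vCenter fault, e.g. ['InvalidArgument']
            # as raised above for the fileType error on CopyVirtualDisk_Task.
            print('task failed: %s (faults: %s)' % (e, e.fault_list))
            raise
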
[ 1677.137104] env[62346]: DEBUG nova.compute.claims [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1677.137285] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.137495] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.149425] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1677.212721] env[62346]: DEBUG oslo_vmware.rw_handles [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1677.274612] env[62346]: DEBUG oslo_vmware.rw_handles [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1677.274810] env[62346]: DEBUG oslo_vmware.rw_handles [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1677.380586] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6091804b-bb40-467a-bb88-09bff6f4651f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.389226] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce30f475-c7ab-4224-9521-71fc952e2665 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.421473] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7ccf8f-58c0-4d44-be52-97b161f70cf3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.429804] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9175368d-b0de-4ae1-b88a-f5b027f52655 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.444489] env[62346]: DEBUG nova.compute.provider_tree [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.454398] env[62346]: DEBUG nova.scheduler.client.report [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1677.473736] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.336s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.474367] env[62346]: ERROR nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1677.474367] env[62346]: Faults: ['InvalidArgument'] [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Traceback (most recent call last): [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1677.474367] env[62346]: ERROR 
nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self.driver.spawn(context, instance, image_meta, [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self._fetch_image_if_missing(context, vi) [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] image_cache(vi, tmp_image_ds_loc) [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] vm_util.copy_virtual_disk( [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] session._wait_for_task(vmdk_copy_task) [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] return self.wait_for_task(task_ref) [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] return evt.wait() [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] result = hub.switch() [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] return self.greenlet.switch() [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] self.f(*self.args, **self.kw) [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] raise exceptions.translate_fault(task_info.error) [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Faults: ['InvalidArgument'] [ 1677.474367] env[62346]: ERROR nova.compute.manager [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] [ 1677.475422] env[62346]: DEBUG nova.compute.utils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1677.476884] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Build of instance 88727b37-0f05-4551-ac87-e43385e0f76d was re-scheduled: A specified parameter was not correct: fileType [ 1677.476884] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1677.477316] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1677.477490] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1677.477661] env[62346]: DEBUG nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1677.477829] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1677.847370] env[62346]: DEBUG nova.network.neutron [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.876780] env[62346]: INFO nova.compute.manager [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Took 0.40 seconds to deallocate network for instance. [ 1677.998793] env[62346]: INFO nova.scheduler.client.report [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Deleted allocations for instance 88727b37-0f05-4551-ac87-e43385e0f76d [ 1678.031069] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c29db006-0d40-4264-8283-b28800dd2fe1 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "88727b37-0f05-4551-ac87-e43385e0f76d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.267s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.032782] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "88727b37-0f05-4551-ac87-e43385e0f76d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 430.742s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.032782] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "88727b37-0f05-4551-ac87-e43385e0f76d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.032782] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "88727b37-0f05-4551-ac87-e43385e0f76d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.033035] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "88727b37-0f05-4551-ac87-e43385e0f76d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.034927] env[62346]: INFO nova.compute.manager [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Terminating instance [ 1678.036956] env[62346]: DEBUG nova.compute.manager [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1678.037368] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1678.037962] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5633fa8b-9cfb-497b-872d-3e26f706cbab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.048229] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5161a6b2-70f7-4166-88ba-7c516a8832b1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.060097] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1678.086145] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88727b37-0f05-4551-ac87-e43385e0f76d could not be found. [ 1678.086438] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1678.086660] env[62346]: INFO nova.compute.manager [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Took 0.05 seconds to destroy the instance on the hypervisor. 
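
The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" records above are emitted by oslo.concurrency's lockutils wrapper, which times how long a caller waited for a named lock and how long it was held (here, 627.267s held by _locked_do_build_and_run_instance while do_terminate_instance waited 430.742s on the same instance UUID). A minimal sketch of that pattern using oslo.concurrency directly; the function, timing, and message format below are illustrative, not Nova's code:

    import time
    from oslo_concurrency import lockutils

    def do_terminate_instance(uuid):
        requested = time.monotonic()
        # Same named-lock primitive the "Acquiring lock ..." records report on;
        # with the defaults this is a process-local semaphore, not a file lock.
        with lockutils.lock(uuid):
            acquired = time.monotonic()
            print('Lock "%s" acquired :: waited %.3fs' % (uuid, acquired - requested))
            # ... terminate work runs while the lock is held ...
        print('Lock "%s" released :: held %.3fs' % (uuid, time.monotonic() - acquired))
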
[ 1678.086919] env[62346]: DEBUG oslo.service.loopingcall [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1678.087195] env[62346]: DEBUG nova.compute.manager [-] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1678.087297] env[62346]: DEBUG nova.network.neutron [-] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1678.119401] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.119668] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.121470] env[62346]: INFO nova.compute.claims [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1678.123952] env[62346]: DEBUG nova.network.neutron [-] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.132700] env[62346]: INFO nova.compute.manager [-] [instance: 88727b37-0f05-4551-ac87-e43385e0f76d] Took 0.05 seconds to deallocate network for instance. 
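
The oslo.service.loopingcall record above ("Waiting for function ... _deallocate_network_with_retries to return") reflects a poll-until-done pattern: a callable is scheduled repeatedly until it raises LoopingCallDone, which unblocks the waiter. A hedged sketch of that mechanism; the helper and the one-second interval are invented for illustration, and Nova's actual retry wrapper around network deallocation differs:

    from oslo_service import loopingcall

    def try_deallocate():
        # Hypothetical stand-in for one network-deallocation attempt.
        return True

    def _deallocate_with_retries():
        if try_deallocate():
            # Stops the loop and unblocks the wait() below.
            raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1).wait()
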
[ 1678.251033] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5914c4ac-1a68-4b88-9f08-a82e5a3170d4 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "88727b37-0f05-4551-ac87-e43385e0f76d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.219s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.335704] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d31b13-722c-4cf9-8e48-fe1df2caeb53 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.344296] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102fbeb8-4531-405b-bc4d-41506313d069 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.374925] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a37a60-3683-4f3e-a80f-2497bcd14d4b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.383476] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0f2e2b-8dcf-4e2e-a735-487a8c508fc8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.398088] env[62346]: DEBUG nova.compute.provider_tree [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1678.409808] env[62346]: DEBUG nova.scheduler.client.report [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1678.426248] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.426749] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1678.467454] env[62346]: DEBUG nova.compute.utils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1678.468864] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1678.469063] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1678.479639] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1678.529423] env[62346]: DEBUG nova.policy [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9eb444699bfe4137a12b88f71543f185', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20adb521b1574b8581a0c368923e38eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1678.549293] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1678.577028] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1678.577028] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1678.577028] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1678.577028] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1678.577028] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1678.577028] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1678.577028] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1678.577433] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1678.577433] env[62346]: DEBUG 
nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1678.577507] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1678.577666] env[62346]: DEBUG nova.virt.hardware [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1678.578683] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23db8409-95fb-4f96-af51-e393781f690b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.588012] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c52c2f-4cd5-4242-a677-dc3e779cf829 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.898710] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Successfully created port: c531eba1-71a6-401c-b2d8-de9de3b83d6c {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1679.652641] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Successfully updated port: c531eba1-71a6-401c-b2d8-de9de3b83d6c {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1679.666341] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "refresh_cache-21a988a5-43cc-44f8-97f4-01c5442b6303" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.666496] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "refresh_cache-21a988a5-43cc-44f8-97f4-01c5442b6303" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.666649] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1679.744039] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da 
tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1679.938496] env[62346]: DEBUG nova.compute.manager [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Received event network-vif-plugged-c531eba1-71a6-401c-b2d8-de9de3b83d6c {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1679.938801] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] Acquiring lock "21a988a5-43cc-44f8-97f4-01c5442b6303-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.939060] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.939190] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.939410] env[62346]: DEBUG nova.compute.manager [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] No waiting events found dispatching network-vif-plugged-c531eba1-71a6-401c-b2d8-de9de3b83d6c {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1679.939508] env[62346]: WARNING nova.compute.manager [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Received unexpected event network-vif-plugged-c531eba1-71a6-401c-b2d8-de9de3b83d6c for instance with vm_state building and task_state spawning. [ 1679.939707] env[62346]: DEBUG nova.compute.manager [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Received event network-changed-c531eba1-71a6-401c-b2d8-de9de3b83d6c {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1679.939803] env[62346]: DEBUG nova.compute.manager [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Refreshing instance network info cache due to event network-changed-c531eba1-71a6-401c-b2d8-de9de3b83d6c. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1679.939961] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] Acquiring lock "refresh_cache-21a988a5-43cc-44f8-97f4-01c5442b6303" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.960872] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Updating instance_info_cache with network_info: [{"id": "c531eba1-71a6-401c-b2d8-de9de3b83d6c", "address": "fa:16:3e:bd:54:01", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc531eba1-71", "ovs_interfaceid": "c531eba1-71a6-401c-b2d8-de9de3b83d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.973710] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "refresh_cache-21a988a5-43cc-44f8-97f4-01c5442b6303" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.974012] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Instance network_info: |[{"id": "c531eba1-71a6-401c-b2d8-de9de3b83d6c", "address": "fa:16:3e:bd:54:01", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapc531eba1-71", "ovs_interfaceid": "c531eba1-71a6-401c-b2d8-de9de3b83d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1679.974302] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] Acquired lock "refresh_cache-21a988a5-43cc-44f8-97f4-01c5442b6303" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.974481] env[62346]: DEBUG nova.network.neutron [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Refreshing network info cache for port c531eba1-71a6-401c-b2d8-de9de3b83d6c {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1679.975536] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:54:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c531eba1-71a6-401c-b2d8-de9de3b83d6c', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1679.983621] env[62346]: DEBUG oslo.service.loopingcall [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.986765] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1679.987325] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c2c5603-81a2-4d06-89c2-b291c0021955 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.009758] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1680.009758] env[62346]: value = "task-4891759" [ 1680.009758] env[62346]: _type = "Task" [ 1680.009758] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.018546] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891759, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.367498] env[62346]: DEBUG nova.network.neutron [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Updated VIF entry in instance network info cache for port c531eba1-71a6-401c-b2d8-de9de3b83d6c. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1680.367498] env[62346]: DEBUG nova.network.neutron [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Updating instance_info_cache with network_info: [{"id": "c531eba1-71a6-401c-b2d8-de9de3b83d6c", "address": "fa:16:3e:bd:54:01", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc531eba1-71", "ovs_interfaceid": "c531eba1-71a6-401c-b2d8-de9de3b83d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.378853] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec054160-b385-467a-b81e-bebae196b8d3 req-a65efdfe-6445-45a1-b18e-1d54f46931dc service nova] Releasing lock "refresh_cache-21a988a5-43cc-44f8-97f4-01c5442b6303" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.519991] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891759, 'name': CreateVM_Task, 'duration_secs': 0.313845} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.520171] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1680.520914] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.521119] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.521462] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1680.521730] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d52c2ef5-69d5-4816-baa2-b6c2cd808aa3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.526307] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 1680.526307] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5289e519-72ff-4b17-0191-96ed0ef52009" [ 1680.526307] env[62346]: _type = "Task" [ 1680.526307] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.534684] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5289e519-72ff-4b17-0191-96ed0ef52009, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.038478] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.038842] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1681.038901] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.288887] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "052de992-f28b-4c25-bfbe-3517665f1902" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.288887] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "052de992-f28b-4c25-bfbe-3517665f1902" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.480251] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "21a988a5-43cc-44f8-97f4-01c5442b6303" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.222101] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.234619] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.234831] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.234993] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.235169] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1710.236805] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38da0258-97b9-48c9-b089-49357b60255e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.245870] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348f666b-f168-410b-bb7c-1672c94db68d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.261319] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097b9ab2-b1fa-4cf5-918c-d98a8726f3a1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.268699] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a84a808-03b2-4c80-b768-4d2b37c0e830 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.299717] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180584MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1710.299922] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.300068] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.375431] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.375591] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21910ef4-a1af-4064-bf9e-350f78a938ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.375719] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.375841] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.375963] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.376094] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.376213] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.376349] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.376501] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.376566] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.387827] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1710.388061] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1710.388226] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '76', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_20adb521b1574b8581a0c368923e38eb': '2', 'io_workload': '10', 'num_proj_f059d4d596ee4d2abf7190a5806dd848': '1', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'num_proj_718112c25d784fc1aa3f11916d691658': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_7d571ab102004368b9265ca62b137356': '1', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1710.532644] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d71081-98b1-4cfa-af9e-ecda4ab11def {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.540636] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428a98af-8053-481d-bb5d-6b8deefb35b7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.570444] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b668576c-c4aa-4520-9553-bbd251c1e07b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.578819] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d286f5-428b-49ca-a359-b3f9db9df1ee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.592425] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 
50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1710.601999] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1710.615860] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1710.616086] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.316s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.616294] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.616458] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1710.624937] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] There are 0 instances to clean {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1711.623477] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.215457] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.220623] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.220956] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1713.221013] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:10011}} [ 1713.241864] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.242211] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.242406] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.242549] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.242705] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.242859] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.242994] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.243161] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.243300] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.243447] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.243601] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1713.244151] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.221177] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.221177] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.221177] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1716.221259] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1722.220627] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.224189] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.220591] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.023773] env[62346]: WARNING oslo_vmware.rw_handles [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1726.023773] env[62346]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1726.023773] env[62346]: ERROR oslo_vmware.rw_handles [ 1726.024577] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1726.026560] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1726.026812] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Copying Virtual Disk [datastore2] vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/78775835-86ff-42e0-9344-3be77387ce2d/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1726.027131] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52f8a08d-1fc5-44f0-ab29-eafbf4e8fc2a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.037566] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 1726.037566] env[62346]: value = "task-4891760" [ 1726.037566] env[62346]: _type = "Task" [ 1726.037566] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.045845] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891760, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.220725] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.221072] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances with incomplete migration {{(pid=62346) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1726.547707] env[62346]: DEBUG oslo_vmware.exceptions [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1726.548008] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.548612] env[62346]: ERROR nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1726.548612] env[62346]: Faults: ['InvalidArgument'] [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Traceback (most recent call last): [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] yield resources [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self.driver.spawn(context, instance, image_meta, [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self._fetch_image_if_missing(context, vi) [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1726.548612] env[62346]: ERROR 
nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] image_cache(vi, tmp_image_ds_loc) [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] vm_util.copy_virtual_disk( [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] session._wait_for_task(vmdk_copy_task) [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] return self.wait_for_task(task_ref) [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] return evt.wait() [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] result = hub.switch() [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] return self.greenlet.switch() [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self.f(*self.args, **self.kw) [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] raise exceptions.translate_fault(task_info.error) [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Faults: ['InvalidArgument'] [ 1726.548612] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] [ 1726.549957] env[62346]: INFO nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Terminating instance [ 1726.550605] env[62346]: DEBUG oslo_concurrency.lockutils [None 
req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.550812] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1726.551064] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-271f6260-19cc-4227-a056-901abb7fde00 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.554686] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1726.554871] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1726.555780] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50a9c37-cf57-4114-b40a-3ca386023847 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.559727] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1726.559898] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1726.560913] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2193fdd-61e3-4825-838c-8f6042564781 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.564846] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1726.565397] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6f27c04-3150-4ec5-afb1-457a281a37fd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.568096] env[62346]: DEBUG oslo_vmware.api [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Waiting for the task: (returnval){ [ 1726.568096] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]522c08f8-4d85-5a70-a6dc-eff0173e68de" [ 1726.568096] env[62346]: _type = "Task" [ 1726.568096] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.576201] env[62346]: DEBUG oslo_vmware.api [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]522c08f8-4d85-5a70-a6dc-eff0173e68de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.642233] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1726.642451] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1726.642637] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleting the datastore file [datastore2] 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1726.642923] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6523a62b-4b09-48a2-a009-b1d83b6e920e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.649859] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 1726.649859] env[62346]: value = "task-4891762" [ 1726.649859] env[62346]: _type = "Task" [ 1726.649859] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.658338] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891762, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.079220] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1727.079480] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Creating directory with path [datastore2] vmware_temp/3424b456-2a2f-4c1f-af3f-105eacc62834/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.079743] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b3e79f8-9403-452b-8962-f4038900c665 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.092968] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Created directory with path [datastore2] vmware_temp/3424b456-2a2f-4c1f-af3f-105eacc62834/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.093191] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Fetch image to [datastore2] vmware_temp/3424b456-2a2f-4c1f-af3f-105eacc62834/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1727.093403] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/3424b456-2a2f-4c1f-af3f-105eacc62834/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1727.094213] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb720b92-f58e-4138-bc1e-57c143fee2b1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.102169] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7617ce3e-c702-492f-abd0-4f057e888e73 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.111256] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac49757-e3cd-4c72-9235-0663e8f695a0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.141491] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e9a684-75e9-4847-a54b-aebdeeff3dc5 {{(pid=62346) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.147995] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b63623cc-8076-48d2-9959-52a997396860 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.158093] env[62346]: DEBUG oslo_vmware.api [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891762, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069124} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.158318] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1727.158526] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1727.158701] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1727.158862] env[62346]: INFO nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Took 0.60 seconds to destroy the instance on the hypervisor. 
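The task records above (CopyVirtualDisk_Task task-4891760, then DeleteDatastoreFile_Task task-4891762 completing with duration_secs 0.069) follow oslo.vmware's submit-then-poll pattern: invoke the vSphere task method, then block in wait_for_task while _poll_task logs progress. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, and datastore paths below are placeholders, not this deployment's values:

from oslo_vmware import api

# Hypothetical endpoint and credentials; task_poll_interval drives the
# repeated "... progress is 0%" records seen above.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

vdm = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore2] vmware_temp/tmp-sparse.vmdk',  # illustrative path
    destName='[datastore2] devstack-image-cache_base/image.vmdk')
session.wait_for_task(task)  # blocks; raises a translated VimFaultException on task error
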
[ 1727.160947] env[62346]: DEBUG nova.compute.claims [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1727.161146] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.161364] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.169062] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1727.310162] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.310909] env[62346]: ERROR nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. 
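The "compute_resources" acquiring/acquired records here (and the matching release at held 0.291s further down) are emitted by oslo.concurrency's lock wrapper. A hedged sketch of the same serialization idiom, reusing only the lock name from the log:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Resource-tracker bookkeeping runs under this lock; the DEBUG lines
    # report how long each caller waited for and then held it.
    pass

# Equivalent context-manager form:
with lockutils.lock('compute_resources'):
    pass  # critical section
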
[ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = getattr(controller, method)(*args, **kwargs) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._get(image_id) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] resp, body = self.http_client.get(url, headers=header) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.request(url, 'GET', **kwargs) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._handle_response(resp) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exc.from_response(resp, resp.content) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] During handling of the above exception, another exception occurred: [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] yield resources [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self.driver.spawn(context, instance, image_meta, [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._fetch_image_if_missing(context, vi) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image_fetch(context, vi, tmp_image_ds_loc) [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] images.fetch_image( [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1727.310909] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] metadata = IMAGE_API.get(context, image_ref) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return session.show(context, image_id, [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] _reraise_translated_image_exception(image_id) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise new_exc.with_traceback(exc_trace) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = getattr(controller, method)(*args, **kwargs) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._get(image_id) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] resp, body = self.http_client.get(url, headers=header) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.request(url, 'GET', **kwargs) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._handle_response(resp) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exc.from_response(resp, resp.content) [ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. 
[ 1727.312206] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1727.312206] env[62346]: INFO nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Terminating instance [ 1727.312931] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.312972] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.313598] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1727.313822] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1727.314070] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d6cc16e-2bc1-478c-ad1c-2ef6eadc2f3d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.316505] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66aac34-7dd3-43ba-b33b-dee1c9d87a3b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.326498] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1727.326773] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9bb095d1-8bab-4709-8284-8f16d3d23ecc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.329164] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.329343] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 
tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1727.330326] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89624963-8c7e-4a3e-a311-04be75d0b34d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.338480] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 1727.338480] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52295b8a-6b08-a4c6-b4dc-35e4be618c9d" [ 1727.338480] env[62346]: _type = "Task" [ 1727.338480] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.346818] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52295b8a-6b08-a4c6-b4dc-35e4be618c9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.359739] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39b2598-9b7c-4e78-9cea-3b1dc6ce5366 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.367207] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aaf8220-e30f-4b35-9814-18ab625c611f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.400334] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759ed095-002a-412b-8934-32c139924335 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.409043] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9e1a26-926b-4385-a49d-d695836dd056 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.423164] env[62346]: DEBUG nova.compute.provider_tree [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1727.432500] env[62346]: DEBUG nova.scheduler.client.report [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 
'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1727.452351] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.291s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.452660] env[62346]: ERROR nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1727.452660] env[62346]: Faults: ['InvalidArgument'] [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Traceback (most recent call last): [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self.driver.spawn(context, instance, image_meta, [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self._fetch_image_if_missing(context, vi) [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] image_cache(vi, tmp_image_ds_loc) [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] vm_util.copy_virtual_disk( [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] session._wait_for_task(vmdk_copy_task) [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] return self.wait_for_task(task_ref) [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 
732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] return evt.wait() [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] result = hub.switch() [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] return self.greenlet.switch() [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] self.f(*self.args, **self.kw) [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] raise exceptions.translate_fault(task_info.error) [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Faults: ['InvalidArgument'] [ 1727.452660] env[62346]: ERROR nova.compute.manager [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] [ 1727.453858] env[62346]: DEBUG nova.compute.utils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1727.456403] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Build of instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 was re-scheduled: A specified parameter was not correct: fileType [ 1727.456403] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1727.457255] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1727.457255] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Virt driver does not provide unplug_vifs method, so it is not 
possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1727.457255] env[62346]: DEBUG nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1727.457433] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1727.460576] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1727.460778] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1727.460959] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Deleting the datastore file [datastore2] 21910ef4-a1af-4064-bf9e-350f78a938ae {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1727.461234] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1c0938a-a399-4d4d-a98e-8f03f8376a45 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.469311] env[62346]: DEBUG oslo_vmware.api [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Waiting for the task: (returnval){ [ 1727.469311] env[62346]: value = "task-4891764" [ 1727.469311] env[62346]: _type = "Task" [ 1727.469311] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.478296] env[62346]: DEBUG oslo_vmware.api [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Task: {'id': task-4891764, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.853024] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1727.853308] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating directory with path [datastore2] vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.853652] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-996d5a32-07ab-4f03-b836-3ad6a4ad1f45 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.868480] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Created directory with path [datastore2] vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.868901] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Fetch image to [datastore2] vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1727.869140] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1727.870724] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc6099e-14cc-472f-b5a4-55f3c84a86b4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.877343] env[62346]: DEBUG nova.network.neutron [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.885750] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a917edea-bc71-4ab3-847c-8c69eced4513 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
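Every spawn attempt in this section walks the same fetch-then-cache layout: download the Glance image into a per-request vmware_temp directory as tmp-sparse.vmdk, then copy the disk into devstack-image-cache_base keyed by image id. A rough sketch of the datastore paths involved; the helper functions are invented for illustration, only the layout comes from the records above:

import uuid

IMAGE_ID = '9feb52a6-5366-4257-bc23-471887ce1370'  # image id from the records above

def tmp_fetch_path(datastore='datastore2'):
    # One fresh temp directory per fetch, e.g. vmware_temp/b5841300-.../
    req_dir = uuid.uuid4()
    return '[%s] vmware_temp/%s/%s/tmp-sparse.vmdk' % (datastore, req_dir, IMAGE_ID)

def cache_path(datastore='datastore2'):
    # Destination of the CopyVirtualDisk_Task in the earlier traceback.
    return '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
        datastore, IMAGE_ID, IMAGE_ID)

print(tmp_fetch_path())  # fetch target
print(cache_path())      # image-cache destination
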
[ 1727.899388] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746c7e5a-322c-4999-acb2-83fd843c2e5b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.903824] env[62346]: INFO nova.compute.manager [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Took 0.45 seconds to deallocate network for instance. [ 1727.937840] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29262805-c4dc-420f-8d17-ae69687cbfef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.945968] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ca827338-5b4a-45f6-866d-0275dddbd5bd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.977570] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1727.983970] env[62346]: DEBUG oslo_vmware.api [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Task: {'id': task-4891764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090527} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.984273] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1727.984465] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1727.984641] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1727.985207] env[62346]: INFO nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Took 0.67 seconds to destroy the instance on the hypervisor. 
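The inventory report a few records back fixes the host's schedulable capacity; placement treats effective capacity as (total - reserved) * allocation_ratio, so the figures above work out as follows (numbers taken directly from the log):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, effective)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
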
[ 1727.987059] env[62346]: DEBUG nova.compute.claims [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1727.987298] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.987602] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.024619] env[62346]: INFO nova.scheduler.client.report [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleted allocations for instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 [ 1728.060176] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f18c045e-e9ac-41b7-8d37-b46fea2d5a7b tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 627.841s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.062024] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 432.718s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.062404] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.062628] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.062802] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock
"732fbaa9-beef-488f-9bf6-095ffa1fc1c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.065089] env[62346]: INFO nova.compute.manager [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Terminating instance [ 1728.066822] env[62346]: DEBUG nova.compute.manager [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1728.067023] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1728.067525] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b1c3794-ebb2-4a43-8707-bbaeac78e4ce {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.075676] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1728.088768] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca0bcc5-6e11-43e7-b1b4-5b7e11b7eccf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.125335] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 732fbaa9-beef-488f-9bf6-095ffa1fc1c4 could not be found. [ 1728.125537] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1728.125711] env[62346]: INFO nova.compute.manager [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1728.125954] env[62346]: DEBUG oslo.service.loopingcall [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.128306] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1728.132302] env[62346]: DEBUG nova.compute.manager [-] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1728.132420] env[62346]: DEBUG nova.network.neutron [-] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1728.189681] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.194056] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1728.194241] env[62346]: DEBUG oslo_vmware.rw_handles [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1728.201303] env[62346]: DEBUG nova.network.neutron [-] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.212742] env[62346]: INFO nova.compute.manager [-] [instance: 732fbaa9-beef-488f-9bf6-095ffa1fc1c4] Took 0.08 seconds to deallocate network for instance. 
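The rw_handles records pin down how the image bytes actually reach the datastore: an authenticated HTTPS PUT against the ESX host's /folder endpoint, here writing the 21,318,656-byte sparse VMDK into the staging directory, with SessionManager.AcquireGenericServiceTicket (seen just above) supplying a one-shot ticket for the transfer. Stripped of oslo.vmware's handle classes, this reduces to an ordinary streaming upload; a sketch with requests, where the URL is copied from the log but the cookie name, local file, and CA path are assumptions:

    import requests

    # Sketch of the upload behind "Creating HTTP connection to write to file".
    url = ('https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/'
           'vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/'
           '9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk'
           '?dcPath=ha-datacenter&dsName=datastore2')
    with open('tmp-sparse.vmdk', 'rb') as src:          # assumed local copy
        resp = requests.put(
            url,
            data=src,                                   # file object => streamed body
            headers={'Content-Type': 'application/octet-stream'},
            cookies={'vmware_cgi_ticket': '<ticket>'},  # assumed ticket cookie name
            verify='/etc/ssl/certs/vcenter-ca.pem')     # assumed CA bundle path
    resp.raise_for_status()

In the real flow there is no local copy: note the ordering in the log, where the write handle closes only after "Completed reading data from the image iterator", i.e. Glance is streamed straight through to the datastore.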
[ 1728.266570] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8081bc79-bfe9-4a06-a80f-d27703ab6672 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.280025] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802ec95f-30a1-4533-ad46-33720ead86d8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.309706] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3987a15e-a084-443a-a8be-a1a11db0a018 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.320020] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5c386a70-c288-4570-8e9e-b469fd9fd102 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "732fbaa9-beef-488f-9bf6-095ffa1fc1c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.258s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.323612] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c1e0de-d8d3-4757-b256-facd904be575 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.338568] env[62346]: DEBUG nova.compute.provider_tree [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.347548] env[62346]: DEBUG nova.scheduler.client.report [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1728.363035] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.375s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.363782] env[62346]: ERROR nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370.
[ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = getattr(controller, method)(*args, **kwargs) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._get(image_id) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] resp, body = self.http_client.get(url, headers=header) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.request(url, 'GET', **kwargs) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._handle_response(resp) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exc.from_response(resp, resp.content) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] During handling of the above exception, another exception occurred: [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self.driver.spawn(context, instance, image_meta, [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._fetch_image_if_missing(context, vi) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image_fetch(context, vi, tmp_image_ds_loc) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] images.fetch_image( [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] metadata = IMAGE_API.get(context, image_ref) [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1728.363782] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return session.show(context, image_id, [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] _reraise_translated_image_exception(image_id) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise new_exc.with_traceback(exc_trace) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 
21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = getattr(controller, method)(*args, **kwargs) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._get(image_id) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] resp, body = self.http_client.get(url, headers=header) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.request(url, 'GET', **kwargs) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._handle_response(resp) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exc.from_response(resp, resp.content) [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1728.364998] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.364998] env[62346]: DEBUG nova.compute.utils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. 
{{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1728.365820] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.176s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.367028] env[62346]: INFO nova.compute.claims [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1728.370050] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Build of instance 21910ef4-a1af-4064-bf9e-350f78a938ae was re-scheduled: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1728.370544] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1728.370721] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1728.370887] env[62346]: DEBUG nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1728.371070] env[62346]: DEBUG nova.network.neutron [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1728.468716] env[62346]: DEBUG neutronclient.v2_0.client [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62346) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1728.469966] env[62346]: ERROR nova.compute.manager [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = getattr(controller, method)(*args, **kwargs) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._get(image_id) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] resp, body = self.http_client.get(url, headers=header) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.request(url, 'GET', **kwargs) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._handle_response(resp) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exc.from_response(resp, resp.content) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] During handling of the above exception, another exception occurred: [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self.driver.spawn(context, instance, image_meta, [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._fetch_image_if_missing(context, vi) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image_fetch(context, vi, tmp_image_ds_loc) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] images.fetch_image( [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] metadata = IMAGE_API.get(context, image_ref) [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1728.469966] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return session.show(context, image_id, [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] _reraise_translated_image_exception(image_id) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise new_exc.with_traceback(exc_trace) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 
21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = getattr(controller, method)(*args, **kwargs) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._get(image_id) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] resp, body = self.http_client.get(url, headers=header) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.request(url, 'GET', **kwargs) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self._handle_response(resp) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exc.from_response(resp, resp.content) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] nova.exception.ImageNotAuthorized: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. 
[ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] During handling of the above exception, another exception occurred: [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._build_and_run_instance(context, instance, image, [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exception.RescheduledException( [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] nova.exception.RescheduledException: Build of instance 21910ef4-a1af-4064-bf9e-350f78a938ae was re-scheduled: Not authorized for image 9feb52a6-5366-4257-bc23-471887ce1370. [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] During handling of the above exception, another exception occurred: [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] exception_handler_v20(status_code, error_body) [ 1728.471191] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise client_exc(message=error_message, [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Neutron server returns request_ids: ['req-8d5dfccc-f8fd-4e64-a0d0-8ddefe0894ad'] [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 
21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] During handling of the above exception, another exception occurred: [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._deallocate_network(context, instance, requested_networks) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self.network_api.deallocate_for_instance( [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] data = neutron.list_ports(**search_opts) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.list('ports', self.ports_path, retrieve_all, [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] for r in self._pagination(collection, path, **params): [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] res = self.get(path, params=params) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 
21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.retry_request("GET", action, body=body, [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.do_request(method, action, body=body, [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._handle_fault_response(status_code, replybody, resp) [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exception.Unauthorized() [ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] nova.exception.Unauthorized: Not authorized. 
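This second traceback is the same 401 surfacing through a different translation layer: during the build, glanceclient's HTTPUnauthorized became nova.exception.ImageNotAuthorized in nova/image/glance.py, and here neutronclient's Unauthorized becomes nova.exception.Unauthorized in the nova/network/neutron.py wrapper (the frames at neutron.py:196 and :204). A simplified sketch of that decorator pattern, using a stand-in exception class rather than Nova's real hierarchy:

    from neutronclient.common import exceptions as neutron_exc

    class NotAuthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""

    def translate_neutron_errors(func):
        # Simplified: the real wrapper also checks whether the failing token
        # belonged to the admin client and raises
        # NeutronAdminCredentialConfigurationInvalid in that case
        # (neutron.py:212, visible in the loopingcall traceback further down).
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_exc.Unauthorized:
                raise NotAuthorized()
        return wrapper

    @translate_neutron_errors
    def list_instance_ports(neutron, instance_uuid):
        return neutron.list_ports(device_id=instance_uuid)

The translation is what lets compute-manager code catch a single Nova exception type regardless of which client library produced the 401.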
[ 1728.472443] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.520520] env[62346]: INFO nova.scheduler.client.report [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Deleted allocations for instance 21910ef4-a1af-4064-bf9e-350f78a938ae [ 1728.540405] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5cd4fa4c-a3e1-465c-8c3c-63e74eb99b14 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 553.613s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.543921] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 357.997s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.544207] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Acquiring lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.544415] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.545026] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.547026] env[62346]: INFO nova.compute.manager [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Terminating instance [ 1728.548961] env[62346]: DEBUG nova.compute.manager [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Start destroying the instance on the hypervisor.
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1728.549185] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1728.549725] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9aab3f40-55c9-45ff-b36d-7fa47fef7f2c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.561458] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9290f2ea-88b9-4ee5-b0e5-fd8363f7a697 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.573403] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891603a2-737c-48bf-ad0b-f992bcf0fb83 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.581351] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42d9e1e-9bdf-4d1b-afd6-2273d0d19141 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.595612] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 21910ef4-a1af-4064-bf9e-350f78a938ae could not be found. [ 1728.595827] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1728.596338] env[62346]: INFO nova.compute.manager [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1728.596338] env[62346]: DEBUG oslo.service.loopingcall [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.619644] env[62346]: DEBUG nova.compute.manager [-] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1728.619762] env[62346]: DEBUG nova.network.neutron [-] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1728.622565] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26265bd-2e13-40e3-82f9-891107dda849 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.630770] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22de6406-48a3-43b1-979e-b773ca24cb12 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.645986] env[62346]: DEBUG nova.compute.provider_tree [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.655052] env[62346]: DEBUG nova.scheduler.client.report [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1728.668766] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.669270] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1728.704673] env[62346]: DEBUG nova.compute.utils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1728.706120] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1728.706310] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1728.716936] env[62346]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62346) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1728.717244] env[62346]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1728.717821] env[62346]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-192524dc-8502-468c-ad82-0442b9deb0e6'] [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1728.717821] env[62346]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1728.717821] env[62346]: ERROR oslo.service.loopingcall [ 1728.719467] env[62346]: ERROR nova.compute.manager [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1728.721797] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Start building block device mappings for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1728.747259] env[62346]: ERROR nova.compute.manager [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] exception_handler_v20(status_code, error_body) [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise client_exc(message=error_message, [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Neutron server returns request_ids: ['req-192524dc-8502-468c-ad82-0442b9deb0e6'] [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] During handling of the above exception, another exception occurred: [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Traceback (most recent call last): [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._delete_instance(context, instance, bdms) [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._shutdown_instance(context, instance, bdms) [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 
21910ef4-a1af-4064-bf9e-350f78a938ae] self._try_deallocate_network(context, instance, requested_networks) [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] with excutils.save_and_reraise_exception(): [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self.force_reraise() [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise self.value [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] _deallocate_network_with_retries() [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return evt.wait() [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = hub.switch() [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.greenlet.switch() [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = func(*self.args, **self.kw) [ 1728.747259] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] result = f(*args, **kwargs) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._deallocate_network( [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File 
"/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self.network_api.deallocate_for_instance( [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] data = neutron.list_ports(**search_opts) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.list('ports', self.ports_path, retrieve_all, [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] for r in self._pagination(collection, path, **params): [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] res = self.get(path, params=params) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return self.retry_request("GET", action, body=body, [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] return 
self.do_request(method, action, body=body, [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] ret = obj(*args, **kwargs) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] self._handle_fault_response(status_code, replybody, resp) [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1728.748651] env[62346]: ERROR nova.compute.manager [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] [ 1728.774034] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Lock "21910ef4-a1af-4064-bf9e-350f78a938ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.230s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.784493] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1728.813272] env[62346]: DEBUG nova.policy [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c601083f0a44da850b33189c701bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abc1ead3f9a9442ca0b85f152f94fe6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1728.818236] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1728.818491] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1728.818681] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1728.818892] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1728.819070] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1728.819207] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1728.819419] env[62346]: DEBUG nova.virt.hardware [None 
req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1728.819605] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1728.819793] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1728.819985] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1728.820210] env[62346]: DEBUG nova.virt.hardware [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1728.821142] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70007463-d99c-41c1-a2d0-54821de10f4a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.830436] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d747c37-c5ce-47f0-8d51-3b16bdefbd27 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.847885] env[62346]: INFO nova.compute.manager [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] [instance: 21910ef4-a1af-4064-bf9e-350f78a938ae] Successfully reverted task state from None on failure for instance. [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server [None req-6cf4a291-73db-4479-8591-162ab7f56799 tempest-ServersTestMultiNic-1401990456 tempest-ServersTestMultiNic-1401990456-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-192524dc-8502-468c-ad82-0442b9deb0e6'] [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1728.853595] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server raise self.value [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.856866] env[62346]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1728.856866] env[62346]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1728.859255] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1728.859255] env[62346]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1728.859255] env[62346]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1728.859255] env[62346]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1728.859255] env[62346]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1728.859255] env[62346]: ERROR oslo_messaging.rpc.server [ 1729.172894] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Successfully created port: 9da3d659-f0e4-45ab-ad7d-88124ef9c53f {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1730.101781] env[62346]: DEBUG nova.compute.manager [req-308d9ca3-b6b2-4740-b253-8751c50c0267 req-28dda600-21f1-4694-9e7b-b198b9c58622 service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Received event network-vif-plugged-9da3d659-f0e4-45ab-ad7d-88124ef9c53f {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1730.102119] env[62346]: DEBUG oslo_concurrency.lockutils [req-308d9ca3-b6b2-4740-b253-8751c50c0267 req-28dda600-21f1-4694-9e7b-b198b9c58622 service nova] Acquiring lock "052de992-f28b-4c25-bfbe-3517665f1902-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.102273] env[62346]: DEBUG oslo_concurrency.lockutils [req-308d9ca3-b6b2-4740-b253-8751c50c0267 req-28dda600-21f1-4694-9e7b-b198b9c58622 service nova] Lock "052de992-f28b-4c25-bfbe-3517665f1902-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.102505] env[62346]: DEBUG oslo_concurrency.lockutils [req-308d9ca3-b6b2-4740-b253-8751c50c0267 req-28dda600-21f1-4694-9e7b-b198b9c58622 service nova] Lock "052de992-f28b-4c25-bfbe-3517665f1902-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.103604] env[62346]: DEBUG nova.compute.manager [req-308d9ca3-b6b2-4740-b253-8751c50c0267 req-28dda600-21f1-4694-9e7b-b198b9c58622 service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] No waiting events found dispatching network-vif-plugged-9da3d659-f0e4-45ab-ad7d-88124ef9c53f {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1730.103604] env[62346]: WARNING nova.compute.manager [req-308d9ca3-b6b2-4740-b253-8751c50c0267 req-28dda600-21f1-4694-9e7b-b198b9c58622 service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Received unexpected event network-vif-plugged-9da3d659-f0e4-45ab-ad7d-88124ef9c53f for instance with vm_state building and task_state spawning. 
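The 401 failures above come from nova-compute's own service credentials for Neutron, i.e. the [neutron] section of nova.conf, exactly as the "please verify Neutron admin credential" message suggests. A quick way to confirm that the credentials, not the deallocation path, are at fault is to replay the same list_ports call outside Nova. This is a minimal sketch only; auth_url, username, password, and project_name are placeholders standing in for the corresponding [neutron] options, not the actual values of this deployment:

```python
# Out-of-band check of the Neutron service credentials configured for Nova.
# The endpoint and account values below are placeholders for nova.conf's
# [neutron] section options.
from keystoneauth1 import identity, session
from neutronclient.common import exceptions
from neutronclient.v2_0 import client

auth = identity.Password(
    auth_url="http://controller/identity",
    username="neutron",
    password="secret",
    project_name="service",
    user_domain_name="Default",
    project_domain_name="Default",
)
neutron = client.Client(session=session.Session(auth=auth))

try:
    # The same query deallocate_for_instance() issues at neutron.py:1806.
    neutron.list_ports(device_id="21910ef4-a1af-4064-bf9e-350f78a938ae")
    print("token issued and request authorized")
except exceptions.Unauthorized as exc:
    # Reproduces the 401 that Nova wraps in
    # NeutronAdminCredentialConfigurationInvalid in the tracebacks above.
    print("still unauthorized:", exc)
```

If this fails as well, the fix belongs in Keystone or nova.conf rather than in the retry loop; instance 21910ef4 ended up in ERROR state only because the deallocation retries could never list its ports.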
[ 1730.197339] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Successfully updated port: 9da3d659-f0e4-45ab-ad7d-88124ef9c53f {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1730.210796] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "refresh_cache-052de992-f28b-4c25-bfbe-3517665f1902" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.210796] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "refresh_cache-052de992-f28b-4c25-bfbe-3517665f1902" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.210796] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1730.252656] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1730.439707] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Updating instance_info_cache with network_info: [{"id": "9da3d659-f0e4-45ab-ad7d-88124ef9c53f", "address": "fa:16:3e:c1:69:70", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9da3d659-f0", "ovs_interfaceid": "9da3d659-f0e4-45ab-ad7d-88124ef9c53f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.453115] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "refresh_cache-052de992-f28b-4c25-bfbe-3517665f1902" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.453422] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Instance network_info: |[{"id": "9da3d659-f0e4-45ab-ad7d-88124ef9c53f", "address": "fa:16:3e:c1:69:70", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9da3d659-f0", "ovs_interfaceid": "9da3d659-f0e4-45ab-ad7d-88124ef9c53f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1730.453830] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:69:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9da3d659-f0e4-45ab-ad7d-88124ef9c53f', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1730.461698] env[62346]: DEBUG oslo.service.loopingcall [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1730.462215] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1730.462449] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb975c38-9994-430d-997c-33cbac61cf09 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.483825] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1730.483825] env[62346]: value = "task-4891765" [ 1730.483825] env[62346]: _type = "Task" [ 1730.483825] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.496188] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891765, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.996251] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891765, 'name': CreateVM_Task, 'duration_secs': 0.316709} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.996460] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1730.997179] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.997406] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.997762] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1730.998052] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4429e82-f2ba-4e68-9cf7-c18f75dd52dd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.002828] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1731.002828] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]521cbfdd-e9cc-010e-1c54-792348a66d2a" [ 1731.002828] env[62346]: _type = "Task" [ 1731.002828] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.011063] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]521cbfdd-e9cc-010e-1c54-792348a66d2a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.513806] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.514182] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1731.514253] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.133090] env[62346]: DEBUG nova.compute.manager [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Received event network-changed-9da3d659-f0e4-45ab-ad7d-88124ef9c53f {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1732.133254] env[62346]: DEBUG nova.compute.manager [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Refreshing instance network info cache due to event network-changed-9da3d659-f0e4-45ab-ad7d-88124ef9c53f. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1732.133477] env[62346]: DEBUG oslo_concurrency.lockutils [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] Acquiring lock "refresh_cache-052de992-f28b-4c25-bfbe-3517665f1902" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.133581] env[62346]: DEBUG oslo_concurrency.lockutils [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] Acquired lock "refresh_cache-052de992-f28b-4c25-bfbe-3517665f1902" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.133747] env[62346]: DEBUG nova.network.neutron [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Refreshing network info cache for port 9da3d659-f0e4-45ab-ad7d-88124ef9c53f {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1732.451486] env[62346]: DEBUG nova.network.neutron [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Updated VIF entry in instance network info cache for port 9da3d659-f0e4-45ab-ad7d-88124ef9c53f. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1732.451843] env[62346]: DEBUG nova.network.neutron [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Updating instance_info_cache with network_info: [{"id": "9da3d659-f0e4-45ab-ad7d-88124ef9c53f", "address": "fa:16:3e:c1:69:70", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9da3d659-f0", "ovs_interfaceid": "9da3d659-f0e4-45ab-ad7d-88124ef9c53f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.461213] env[62346]: DEBUG oslo_concurrency.lockutils [req-c24a7b72-9dc1-486a-a323-30aceb0ec9e4 req-0ea5ba9f-ddd9-4331-b4c0-09f4b616901a service nova] Releasing lock "refresh_cache-052de992-f28b-4c25-bfbe-3517665f1902" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.395787] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.396119] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.407878] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1742.465083] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.465083] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.465404] env[62346]: INFO nova.compute.claims [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1742.641756] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11594b75-e37b-4d2d-92dd-9f8da05b7303 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.650483] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959fd71b-c33a-4ccc-ad31-9b08a3b48fc4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.682168] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e5f978-22ca-4398-bdab-33790cc11879 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.690249] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fd7b72-7275-4afc-a632-b62182706787 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.704048] env[62346]: DEBUG nova.compute.provider_tree [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.716080] env[62346]: DEBUG nova.scheduler.client.report [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1742.733262] env[62346]: DEBUG oslo_concurrency.lockutils 
[None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.733834] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1742.768312] env[62346]: DEBUG nova.compute.utils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1742.769822] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1742.769987] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1742.779093] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1742.832897] env[62346]: DEBUG nova.policy [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3162e3d614e54ef68baf2c5f0671c7ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07ea81cc0ce14cb19c28dd7011ca9fd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1742.853627] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1742.880562] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1742.880856] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1742.881032] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1742.881227] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1742.881407] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1742.881550] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1742.881754] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1742.882249] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1742.882249] env[62346]: DEBUG 
nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1742.882249] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1742.882457] env[62346]: DEBUG nova.virt.hardware [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1742.883332] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ec0560-72b9-4172-8c6d-532d2ea00bb9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.892261] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3ed04f-4d2b-4cc9-ac0a-7d04eaf6d82c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.177044] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Successfully created port: 1727e96a-559a-40b3-92bd-e0d8fff36b65 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1743.874356] env[62346]: DEBUG nova.compute.manager [req-ec79a625-d2eb-4ecb-847e-ef1f16b4a0d1 req-309ca3a3-d487-4840-ac02-5a0d13db60ef service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Received event network-vif-plugged-1727e96a-559a-40b3-92bd-e0d8fff36b65 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1743.874634] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec79a625-d2eb-4ecb-847e-ef1f16b4a0d1 req-309ca3a3-d487-4840-ac02-5a0d13db60ef service nova] Acquiring lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.874788] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec79a625-d2eb-4ecb-847e-ef1f16b4a0d1 req-309ca3a3-d487-4840-ac02-5a0d13db60ef service nova] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.874936] env[62346]: DEBUG oslo_concurrency.lockutils [req-ec79a625-d2eb-4ecb-847e-ef1f16b4a0d1 req-309ca3a3-d487-4840-ac02-5a0d13db60ef service nova] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.875279] env[62346]: DEBUG 
nova.compute.manager [req-ec79a625-d2eb-4ecb-847e-ef1f16b4a0d1 req-309ca3a3-d487-4840-ac02-5a0d13db60ef service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] No waiting events found dispatching network-vif-plugged-1727e96a-559a-40b3-92bd-e0d8fff36b65 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1743.875511] env[62346]: WARNING nova.compute.manager [req-ec79a625-d2eb-4ecb-847e-ef1f16b4a0d1 req-309ca3a3-d487-4840-ac02-5a0d13db60ef service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Received unexpected event network-vif-plugged-1727e96a-559a-40b3-92bd-e0d8fff36b65 for instance with vm_state building and task_state spawning. [ 1743.989494] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Successfully updated port: 1727e96a-559a-40b3-92bd-e0d8fff36b65 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1743.997811] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "refresh_cache-0f6433b0-fa14-4546-b4f0-c7c1edf8433e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.997994] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquired lock "refresh_cache-0f6433b0-fa14-4546-b4f0-c7c1edf8433e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.998179] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1744.052376] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1744.246357] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Updating instance_info_cache with network_info: [{"id": "1727e96a-559a-40b3-92bd-e0d8fff36b65", "address": "fa:16:3e:90:95:4c", "network": {"id": "2705be59-a4b9-414e-8ec4-a2c4ce3ecb46", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-971115185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ea81cc0ce14cb19c28dd7011ca9fd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1727e96a-55", "ovs_interfaceid": "1727e96a-559a-40b3-92bd-e0d8fff36b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.261114] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Releasing lock "refresh_cache-0f6433b0-fa14-4546-b4f0-c7c1edf8433e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.261441] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Instance network_info: |[{"id": "1727e96a-559a-40b3-92bd-e0d8fff36b65", "address": "fa:16:3e:90:95:4c", "network": {"id": "2705be59-a4b9-414e-8ec4-a2c4ce3ecb46", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-971115185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ea81cc0ce14cb19c28dd7011ca9fd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1727e96a-55", "ovs_interfaceid": "1727e96a-559a-40b3-92bd-e0d8fff36b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1744.261862] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:95:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1727e96a-559a-40b3-92bd-e0d8fff36b65', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1744.269617] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Creating folder: Project (07ea81cc0ce14cb19c28dd7011ca9fd7). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1744.270295] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2c7a1e4-8e82-47b9-949b-9b39049ed41a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.282225] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Created folder: Project (07ea81cc0ce14cb19c28dd7011ca9fd7) in parent group-v953204. [ 1744.282425] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Creating folder: Instances. Parent ref: group-v953309. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1744.282676] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25e8da62-2573-4856-b32e-5ecba4d86e8d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.292732] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Created folder: Instances in parent group-v953309. [ 1744.293040] env[62346]: DEBUG oslo.service.loopingcall [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1744.293218] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1744.293436] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19bb943-df10-4773-a761-3a4ec49a687d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.314623] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1744.314623] env[62346]: value = "task-4891768" [ 1744.314623] env[62346]: _type = "Task" [ 1744.314623] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.324047] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891768, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.641946] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_power_states {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.665248] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Getting list of instances from cluster (obj){ [ 1744.665248] env[62346]: value = "domain-c8" [ 1744.665248] env[62346]: _type = "ClusterComputeResource" [ 1744.665248] env[62346]: } {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1744.667267] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c0688f-1725-40d3-9e62-5c698b85e33f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.686336] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Got total of 10 instances {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1744.686524] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid c6d55895-0a7a-4088-a065-3337c6045878 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid e9f8e137-98d4-48ef-b642-8cd9aff72f87 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 8979ed84-fa1d-49a1-9f00-844d0b0f604a {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid daef9d9c-03a6-4ee8-9806-9d895f802776 {{(pid=62346) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 5af6a907-80d7-4630-aa01-c600e4908d32 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 87c6dc89-e89b-4c72-b29c-16751a749d29 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 21a988a5-43cc-44f8-97f4-01c5442b6303 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 052de992-f28b-4c25-bfbe-3517665f1902 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688052] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 0f6433b0-fa14-4546-b4f0-c7c1edf8433e {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1744.688489] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "c6d55895-0a7a-4088-a065-3337c6045878" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.688489] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.688648] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.688927] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.689145] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "daef9d9c-03a6-4ee8-9806-9d895f802776" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.689362] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "5af6a907-80d7-4630-aa01-c600e4908d32" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.689538] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "87c6dc89-e89b-4c72-b29c-16751a749d29" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.689740] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "21a988a5-43cc-44f8-97f4-01c5442b6303" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.690149] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "052de992-f28b-4c25-bfbe-3517665f1902" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.690390] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.824649] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891768, 'name': CreateVM_Task, 'duration_secs': 0.329545} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.824838] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1744.825504] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.825667] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.825982] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1744.826242] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-549c2974-2c46-4736-913d-d298200bd750 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.832038] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Waiting for the task: (returnval){ [ 1744.832038] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52811bf8-62e4-8cef-e3f5-b2da11c5d7f7" [ 1744.832038] env[62346]: _type = "Task" [ 1744.832038] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.839665] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52811bf8-62e4-8cef-e3f5-b2da11c5d7f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.342569] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.342948] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1745.343037] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.900692] env[62346]: DEBUG nova.compute.manager [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Received event network-changed-1727e96a-559a-40b3-92bd-e0d8fff36b65 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1745.900911] env[62346]: DEBUG nova.compute.manager [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Refreshing instance network info cache due to event network-changed-1727e96a-559a-40b3-92bd-e0d8fff36b65. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1745.901204] env[62346]: DEBUG oslo_concurrency.lockutils [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] Acquiring lock "refresh_cache-0f6433b0-fa14-4546-b4f0-c7c1edf8433e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.901445] env[62346]: DEBUG oslo_concurrency.lockutils [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] Acquired lock "refresh_cache-0f6433b0-fa14-4546-b4f0-c7c1edf8433e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.901709] env[62346]: DEBUG nova.network.neutron [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Refreshing network info cache for port 1727e96a-559a-40b3-92bd-e0d8fff36b65 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1746.444947] env[62346]: DEBUG nova.network.neutron [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Updated VIF entry in instance network info cache for port 1727e96a-559a-40b3-92bd-e0d8fff36b65. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1746.445344] env[62346]: DEBUG nova.network.neutron [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Updating instance_info_cache with network_info: [{"id": "1727e96a-559a-40b3-92bd-e0d8fff36b65", "address": "fa:16:3e:90:95:4c", "network": {"id": "2705be59-a4b9-414e-8ec4-a2c4ce3ecb46", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-971115185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07ea81cc0ce14cb19c28dd7011ca9fd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1727e96a-55", "ovs_interfaceid": "1727e96a-559a-40b3-92bd-e0d8fff36b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.455196] env[62346]: DEBUG oslo_concurrency.lockutils [req-831e3811-fcd9-4728-8aaf-b6966b78d99d req-6202c06a-111f-4f02-979b-9d61583c6693 service nova] Releasing lock "refresh_cache-0f6433b0-fa14-4546-b4f0-c7c1edf8433e" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.221032] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.221032] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.221032] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.233897] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.233897] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.233897] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.233897] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1772.235121] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d71b182-02e5-42f4-a62e-d3216c1060ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.244148] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74302fc2-dde6-413d-b02d-8a471bbc4fb2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.259357] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4da02b-ce2a-4581-b524-5031bf4cae49 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.266963] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c1dd61-544f-4ca2-8682-e4004ad9ee0e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.296763] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180563MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1772.297017] 
env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.297199] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.458632] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance c6d55895-0a7a-4088-a065-3337c6045878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.458799] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.458931] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.459099] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.459245] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.459369] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.459485] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.459600] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.459713] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.459827] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1772.460035] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1772.460207] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '78', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_8317b2c0c98049fe8044a0edb4bca89c': '1', 'io_workload': '10', 'num_proj_718112c25d784fc1aa3f11916d691658': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '2', 'num_proj_7d571ab102004368b9265ca62b137356': '1', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_task_spawning': '2', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1772.477470] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing inventories for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1772.491613] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating ProviderTree inventory for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) _refresh_and_get_inventory 
/opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1772.491813] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating inventory in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1772.503776] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing aggregate associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, aggregates: None {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1772.523464] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing trait associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1772.655860] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db2ecf3-2095-48cf-81fd-0fa236194806 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.665346] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27af951-60b7-4a99-bcec-c6fe11d33248 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.695356] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056d01bb-d162-441f-a74b-744956a8b439 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.703469] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2243a1-7b32-460f-9d88-9d70a9eae6f1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.716903] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.725207] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1772.740092] env[62346]: DEBUG 
nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1772.740321] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.443s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.741849] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.741849] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1773.741849] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1773.763817] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.763817] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.764467] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.764835] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.765134] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.765415] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.766818] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.766818] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.766818] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.766818] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.766818] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1773.766818] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.220400] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.220400] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.220400] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1777.617661] env[62346]: WARNING oslo_vmware.rw_handles [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1777.617661] env[62346]: ERROR oslo_vmware.rw_handles [ 1777.618545] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1777.620126] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1777.620412] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Copying Virtual Disk [datastore2] vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/b5841300-29f0-43c2-bc46-6072b2c11041/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1777.620694] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07ca3782-eed5-4dd4-b22e-837a554c682b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.629123] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 
tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 1777.629123] env[62346]: value = "task-4891769" [ 1777.629123] env[62346]: _type = "Task" [ 1777.629123] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.637274] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': task-4891769, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.139330] env[62346]: DEBUG oslo_vmware.exceptions [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1778.139616] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.140211] env[62346]: ERROR nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1778.140211] env[62346]: Faults: ['InvalidArgument'] [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] Traceback (most recent call last): [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] yield resources [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self.driver.spawn(context, instance, image_meta, [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self._fetch_image_if_missing(context, vi) [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", 
line 639, in _fetch_image_if_missing [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] image_cache(vi, tmp_image_ds_loc) [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] vm_util.copy_virtual_disk( [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] session._wait_for_task(vmdk_copy_task) [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] return self.wait_for_task(task_ref) [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] return evt.wait() [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] result = hub.switch() [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] return self.greenlet.switch() [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self.f(*self.args, **self.kw) [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] raise exceptions.translate_fault(task_info.error) [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] Faults: ['InvalidArgument'] [ 1778.140211] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] [ 1778.141584] env[62346]: INFO nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Terminating instance [ 
1778.142135] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.142340] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1778.142573] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09cb12f7-386e-47ba-bbb1-8db0009cc789 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.144741] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1778.144930] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1778.145653] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da15f84e-4e76-4205-b09d-782df35d3b07 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.153984] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1778.154227] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e7f3cdd-57b9-491f-8573-bfd5570f66f0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.156481] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1778.156650] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1778.157595] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35db57f8-dbb2-4cef-a02d-34ed9643330f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.162690] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Waiting for the task: (returnval){ [ 1778.162690] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5264d771-f8d9-5876-1069-6e8b73b6b298" [ 1778.162690] env[62346]: _type = "Task" [ 1778.162690] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.172363] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5264d771-f8d9-5876-1069-6e8b73b6b298, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.220102] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.224655] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1778.224940] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1778.225185] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Deleting the datastore file [datastore2] c6d55895-0a7a-4088-a065-3337c6045878 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1778.225458] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e741fc39-eb54-4845-b27a-b944a9afeaca {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.232495] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for the task: (returnval){ [ 1778.232495] env[62346]: value = "task-4891771" [ 1778.232495] env[62346]: _type = "Task" [ 1778.232495] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.240731] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': task-4891771, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.673109] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1778.673496] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Creating directory with path [datastore2] vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1778.673620] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-859611de-e82f-4ef0-b7ad-f1650814b6b6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.685884] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Created directory with path [datastore2] vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1778.686127] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Fetch image to [datastore2] vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1778.686302] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1778.687060] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3932f703-661c-4370-a308-a95d0c42f667 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.694369] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243e71d7-bfd6-4233-b298-d34861dbc1a2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1778.703882] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfdebd3-dc08-4469-ab88-d804d5f35eef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.738760] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a284894-f787-4735-bb27-96eb379149e1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.747727] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9809ed85-76dc-456a-960a-3bfa23c7798b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.749469] env[62346]: DEBUG oslo_vmware.api [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Task: {'id': task-4891771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071899} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.749704] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1778.749883] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1778.750071] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1778.750254] env[62346]: INFO nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Took 0.61 seconds to destroy the instance on the hypervisor. 
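The failure recorded above follows oslo.vmware's standard task flow: CopyVirtualDisk_Task is started through invoke_api(), wait_for_task() polls it, and _poll_task() translates the vCenter fault into VimFaultException, which Nova then logs as "A specified parameter was not correct: fileType". A minimal sketch of that call path outside Nova, assuming only oslo.vmware; the host, credentials, and datastore paths are placeholders:

```python
from oslo_vmware import api
from oslo_vmware import exceptions as vexc

# Placeholder session; host and credentials are illustrative only.
session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               api_retry_count=2, task_poll_interval=0.5)

# Start the same kind of disk copy seen in the log and poll it to completion.
vdm = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
    destName='[datastore2] vmware_temp/example/example.vmdk')
try:
    session.wait_for_task(task)
except vexc.VimFaultException as exc:
    # exc.fault_list carries the fault names -- ['InvalidArgument'] above.
    print('disk copy failed:', exc.fault_list, str(exc))
```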
[ 1778.752414] env[62346]: DEBUG nova.compute.claims [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1778.752589] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.752801] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.775817] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1778.828748] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1778.891518] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1778.891723] env[62346]: DEBUG oslo_vmware.rw_handles [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1778.986320] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac813d3-1522-4e36-9041-36b095e28984 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.995603] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b2fdee-e2ec-484a-afe8-e7ad005cf89d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.025811] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7982c5-057e-40a2-bdd7-be07720d4866 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.033975] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d69527a-bc62-48ee-8e0f-efa09bebe5f2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.047802] env[62346]: DEBUG nova.compute.provider_tree [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1779.057785] env[62346]: DEBUG nova.scheduler.client.report [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1779.073893] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.321s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.074466] env[62346]: ERROR nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1779.074466] env[62346]: Faults: ['InvalidArgument'] [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] Traceback (most recent call last): [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance 
[ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self.driver.spawn(context, instance, image_meta, [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self._fetch_image_if_missing(context, vi) [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] image_cache(vi, tmp_image_ds_loc) [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] vm_util.copy_virtual_disk( [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] session._wait_for_task(vmdk_copy_task) [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] return self.wait_for_task(task_ref) [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] return evt.wait() [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] result = hub.switch() [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] return self.greenlet.switch() [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] self.f(*self.args, **self.kw) [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: 
c6d55895-0a7a-4088-a065-3337c6045878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] raise exceptions.translate_fault(task_info.error) [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] Faults: ['InvalidArgument'] [ 1779.074466] env[62346]: ERROR nova.compute.manager [instance: c6d55895-0a7a-4088-a065-3337c6045878] [ 1779.075317] env[62346]: DEBUG nova.compute.utils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1779.076750] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Build of instance c6d55895-0a7a-4088-a065-3337c6045878 was re-scheduled: A specified parameter was not correct: fileType [ 1779.076750] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1779.077149] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1779.077368] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1779.077558] env[62346]: DEBUG nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1779.077725] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1779.492437] env[62346]: DEBUG nova.network.neutron [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.508351] env[62346]: INFO nova.compute.manager [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Took 0.43 seconds to deallocate network for instance. [ 1779.600655] env[62346]: INFO nova.scheduler.client.report [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Deleted allocations for instance c6d55895-0a7a-4088-a065-3337c6045878 [ 1779.623572] env[62346]: DEBUG oslo_concurrency.lockutils [None req-4c10c6f4-bf2a-470d-888b-337a76bac710 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "c6d55895-0a7a-4088-a065-3337c6045878" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 585.404s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.623907] env[62346]: DEBUG oslo_concurrency.lockutils [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "c6d55895-0a7a-4088-a065-3337c6045878" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 389.228s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.624160] env[62346]: DEBUG oslo_concurrency.lockutils [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Acquiring lock "c6d55895-0a7a-4088-a065-3337c6045878-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.624484] env[62346]: DEBUG oslo_concurrency.lockutils [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "c6d55895-0a7a-4088-a065-3337c6045878-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.624663] env[62346]: DEBUG oslo_concurrency.lockutils [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "c6d55895-0a7a-4088-a065-3337c6045878-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.626930] env[62346]: INFO nova.compute.manager [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Terminating instance [ 1779.629270] env[62346]: DEBUG nova.compute.manager [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1779.629511] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1779.629980] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac4e3eb6-a671-493b-806f-6b54ff91e9db {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.639334] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cfe806-b0e3-4644-a99c-4a656c30088f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.673034] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6d55895-0a7a-4088-a065-3337c6045878 could not be found. [ 1779.673271] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1779.673535] env[62346]: INFO nova.compute.manager [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1779.673700] env[62346]: DEBUG oslo.service.loopingcall [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1779.673937] env[62346]: DEBUG nova.compute.manager [-] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1779.674045] env[62346]: DEBUG nova.network.neutron [-] [instance: c6d55895-0a7a-4088-a065-3337c6045878] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1779.698550] env[62346]: DEBUG nova.network.neutron [-] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.706915] env[62346]: INFO nova.compute.manager [-] [instance: c6d55895-0a7a-4088-a065-3337c6045878] Took 0.03 seconds to deallocate network for instance. [ 1779.832568] env[62346]: DEBUG oslo_concurrency.lockutils [None req-72401c03-98be-42af-8bc1-af91267902d7 tempest-AttachVolumeShelveTestJSON-2108320383 tempest-AttachVolumeShelveTestJSON-2108320383-project-member] Lock "c6d55895-0a7a-4088-a065-3337c6045878" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.835025] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "c6d55895-0a7a-4088-a065-3337c6045878" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 35.145s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.835025] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: c6d55895-0a7a-4088-a065-3337c6045878] During sync_power_state the instance has a pending task (deleting). Skip. 
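The provider inventory repeated throughout this stretch is what bounds the instance_claim a few records below. Assuming placement's usual capacity formula, usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a back-of-envelope from the logged numbers:

```python
# Figures copied from the inventory records for provider
# 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 96},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: schedulable=192, per-allocation cap=16
# MEMORY_MB: schedulable=196078, per-allocation cap=65530
# DISK_GB: schedulable=200, per-allocation cap=96
```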
[ 1779.835025] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "c6d55895-0a7a-4088-a065-3337c6045878" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.220257] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.194847] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "ca0f017f-3bca-401f-8e70-83a7a5061116" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.195187] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.206745] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1790.265563] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.265906] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.268153] env[62346]: INFO nova.compute.claims [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1790.474887] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb8801b-d9ef-4a45-ad9b-1f1f5b5b7fcb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.483874] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a576beff-f2e9-4616-8a1b-4943e0272ee0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.515778] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d713bc3-a43d-4987-9d01-bf86c9b90fd6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.524335] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95277b5-bf62-4731-ae3a-60b4b4ed0d86 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.539244] env[62346]: DEBUG nova.compute.provider_tree [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.548691] env[62346]: DEBUG nova.scheduler.client.report [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1790.563697] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 
tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.298s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.564204] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1790.600979] env[62346]: DEBUG nova.compute.utils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1790.602315] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1790.602488] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1790.614039] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1790.677956] env[62346]: DEBUG nova.policy [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b68a54af151441e6b6853c5502518db8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5afa33f3f2b94e68a5161002a9718f78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1790.684167] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1790.711651] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.711872] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.712042] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.712230] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.712378] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.712527] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.712731] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.712885] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.713181] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 
tempest-ServersTestJSON-606722541-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.713385] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.713558] env[62346]: DEBUG nova.virt.hardware [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.714762] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3712f3-7b45-4699-ab41-76a1760c44d6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.724426] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4791229a-ff4a-4fdf-a129-e855d8627e3a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.079929] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Successfully created port: cf1334e6-6268-4c85-8b5f-867b2528277c {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1791.825016] env[62346]: DEBUG nova.compute.manager [req-fc964171-2fbd-475d-9db8-c5f948aeb7e2 req-1a80b73f-93ce-4f25-845f-9f2cc981f6b9 service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Received event network-vif-plugged-cf1334e6-6268-4c85-8b5f-867b2528277c {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1791.825316] env[62346]: DEBUG oslo_concurrency.lockutils [req-fc964171-2fbd-475d-9db8-c5f948aeb7e2 req-1a80b73f-93ce-4f25-845f-9f2cc981f6b9 service nova] Acquiring lock "ca0f017f-3bca-401f-8e70-83a7a5061116-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.825443] env[62346]: DEBUG oslo_concurrency.lockutils [req-fc964171-2fbd-475d-9db8-c5f948aeb7e2 req-1a80b73f-93ce-4f25-845f-9f2cc981f6b9 service nova] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.825613] env[62346]: DEBUG oslo_concurrency.lockutils [req-fc964171-2fbd-475d-9db8-c5f948aeb7e2 req-1a80b73f-93ce-4f25-845f-9f2cc981f6b9 service nova] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.825776] env[62346]: DEBUG nova.compute.manager [req-fc964171-2fbd-475d-9db8-c5f948aeb7e2 req-1a80b73f-93ce-4f25-845f-9f2cc981f6b9 service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] No 
waiting events found dispatching network-vif-plugged-cf1334e6-6268-4c85-8b5f-867b2528277c {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1791.826120] env[62346]: WARNING nova.compute.manager [req-fc964171-2fbd-475d-9db8-c5f948aeb7e2 req-1a80b73f-93ce-4f25-845f-9f2cc981f6b9 service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Received unexpected event network-vif-plugged-cf1334e6-6268-4c85-8b5f-867b2528277c for instance with vm_state building and task_state spawning. [ 1792.163082] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Successfully updated port: cf1334e6-6268-4c85-8b5f-867b2528277c {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1792.186058] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "refresh_cache-ca0f017f-3bca-401f-8e70-83a7a5061116" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.186212] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "refresh_cache-ca0f017f-3bca-401f-8e70-83a7a5061116" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.186356] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1792.277979] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1792.641360] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Updating instance_info_cache with network_info: [{"id": "cf1334e6-6268-4c85-8b5f-867b2528277c", "address": "fa:16:3e:d0:74:6f", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf1334e6-62", "ovs_interfaceid": "cf1334e6-6268-4c85-8b5f-867b2528277c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.653203] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "refresh_cache-ca0f017f-3bca-401f-8e70-83a7a5061116" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.653521] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Instance network_info: |[{"id": "cf1334e6-6268-4c85-8b5f-867b2528277c", "address": "fa:16:3e:d0:74:6f", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf1334e6-62", "ovs_interfaceid": "cf1334e6-6268-4c85-8b5f-867b2528277c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1792.653931] env[62346]: DEBUG 
nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:74:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf1334e6-6268-4c85-8b5f-867b2528277c', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1792.661251] env[62346]: DEBUG oslo.service.loopingcall [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1792.661742] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1792.661974] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcb3de84-e325-42bc-a010-52a6919b9775 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.682353] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1792.682353] env[62346]: value = "task-4891772" [ 1792.682353] env[62346]: _type = "Task" [ 1792.682353] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.690264] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891772, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.194418] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891772, 'name': CreateVM_Task, 'duration_secs': 0.29281} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.194737] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1793.195261] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.195425] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.195749] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1793.195994] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1dab2e2-795b-4cbb-b7e6-3cc711e009cb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.200708] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 1793.200708] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520d8d5e-5edc-c49e-5cb9-f663a1c56237" [ 1793.200708] env[62346]: _type = "Task" [ 1793.200708] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.208355] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520d8d5e-5edc-c49e-5cb9-f663a1c56237, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.711314] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.711577] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1793.711785] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.044977] env[62346]: DEBUG nova.compute.manager [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Received event network-changed-cf1334e6-6268-4c85-8b5f-867b2528277c {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1794.045198] env[62346]: DEBUG nova.compute.manager [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Refreshing instance network info cache due to event network-changed-cf1334e6-6268-4c85-8b5f-867b2528277c. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1794.045421] env[62346]: DEBUG oslo_concurrency.lockutils [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] Acquiring lock "refresh_cache-ca0f017f-3bca-401f-8e70-83a7a5061116" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.045566] env[62346]: DEBUG oslo_concurrency.lockutils [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] Acquired lock "refresh_cache-ca0f017f-3bca-401f-8e70-83a7a5061116" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.045726] env[62346]: DEBUG nova.network.neutron [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Refreshing network info cache for port cf1334e6-6268-4c85-8b5f-867b2528277c {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1794.430382] env[62346]: DEBUG nova.network.neutron [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Updated VIF entry in instance network info cache for port cf1334e6-6268-4c85-8b5f-867b2528277c. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1794.430807] env[62346]: DEBUG nova.network.neutron [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Updating instance_info_cache with network_info: [{"id": "cf1334e6-6268-4c85-8b5f-867b2528277c", "address": "fa:16:3e:d0:74:6f", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf1334e6-62", "ovs_interfaceid": "cf1334e6-6268-4c85-8b5f-867b2528277c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.440778] env[62346]: DEBUG oslo_concurrency.lockutils [req-74f477dc-51eb-408e-ab7e-6e59bd927c66 req-ae6696a3-887a-41d5-aead-c2032b834d3d service nova] Releasing lock "refresh_cache-ca0f017f-3bca-401f-8e70-83a7a5061116" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.306208] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.306560] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.841436] env[62346]: WARNING oslo_vmware.rw_handles [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles 
self._conn.getresponse() [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1825.841436] env[62346]: ERROR oslo_vmware.rw_handles [ 1825.842280] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1825.843828] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1825.844095] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Copying Virtual Disk [datastore2] vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/27b5508a-2f37-4a81-832f-ac13d6d27224/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1825.844376] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8c7126c-3fda-4aec-ad30-a23bef8bb6a9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.853204] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Waiting for the task: (returnval){ [ 1825.853204] env[62346]: value = "task-4891773" [ 1825.853204] env[62346]: _type = "Task" [ 1825.853204] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.861661] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Task: {'id': task-4891773, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.364414] env[62346]: DEBUG oslo_vmware.exceptions [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1826.364716] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.365273] env[62346]: ERROR nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1826.365273] env[62346]: Faults: ['InvalidArgument'] [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Traceback (most recent call last): [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] yield resources [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self.driver.spawn(context, instance, image_meta, [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self._fetch_image_if_missing(context, vi) [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] image_cache(vi, tmp_image_ds_loc) [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] vm_util.copy_virtual_disk( [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] session._wait_for_task(vmdk_copy_task) [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] return self.wait_for_task(task_ref) [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] return evt.wait() [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] result = hub.switch() [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] return self.greenlet.switch() [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self.f(*self.args, **self.kw) [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] raise exceptions.translate_fault(task_info.error) [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Faults: ['InvalidArgument'] [ 1826.365273] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] [ 1826.366529] env[62346]: INFO nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Terminating instance [ 1826.367301] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.367548] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 
tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.367865] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-043daa72-2dab-471e-89b4-e931276ad6a1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.370235] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1826.370426] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1826.371192] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bcd28a-c895-4647-83b1-9406e70c75d7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.378767] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1826.379032] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9299bbe1-bcc2-4fc6-9c86-4108527f30e5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.381496] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.381670] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1826.382718] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7406e02-23ae-49a7-85b9-241db09b586a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.388011] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1826.388011] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]522144e5-e559-4f63-9fc8-1448ca263f3c" [ 1826.388011] env[62346]: _type = "Task" [ 1826.388011] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.396625] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]522144e5-e559-4f63-9fc8-1448ca263f3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.452732] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1826.452954] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1826.453156] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Deleting the datastore file [datastore2] e9f8e137-98d4-48ef-b642-8cd9aff72f87 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1826.453480] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cc8caa1-7027-4881-af55-672a76a2a44a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.461339] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Waiting for the task: (returnval){ [ 1826.461339] env[62346]: value = "task-4891775" [ 1826.461339] env[62346]: _type = "Task" [ 1826.461339] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.469710] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Task: {'id': task-4891775, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.898820] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1826.899304] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.899376] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a85b7176-63d0-4a25-8329-3bc5fd8a4da2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.911874] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.912106] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Fetch image to [datastore2] vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1826.912390] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1826.913219] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b720429-170d-4a74-8b1c-27fb08c2e99e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.920785] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9651c7-6bfe-4750-8f98-caf203224909 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.930799] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4380d1fc-71ec-4ae3-bf0c-b35b3da3eb2f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.966700] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f232495-0d61-49e2-8953-f45e8368dc48 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.974657] env[62346]: DEBUG oslo_vmware.api [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Task: {'id': task-4891775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071434} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.976238] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1826.976431] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1826.976607] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1826.976782] env[62346]: INFO nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Took 0.61 seconds to destroy the instance on the hypervisor. 
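Every "Waiting for the task: (returnval)" block in this section (task-4891772, task-4891773, task-4891775, and the session-scoped SearchDatastore tasks) is driven by the same poll-until-done shape: fetch task info, log "progress is N%", sleep, repeat until success or error. A minimal sketch of that loop, using a simulated get_task_info() stand-in rather than oslo.vmware's real session API:

    import itertools
    import time

    # Hypothetical stand-in for a vSphere task query; not oslo.vmware's
    # interface. The simulator reports two "running" polls and then
    # success, like CreateVM_Task (task-4891772) above.
    _states = itertools.chain([('running', 0), ('running', 50)],
                              itertools.repeat(('success', 100)))

    def get_task_info(task_ref):
        state, progress = next(_states)
        return {'id': task_ref, 'state': state, 'progress': progress}

    def wait_for_task(task_ref, poll_interval=0.5):
        """Poll until the task leaves the running state, mirroring the
        'progress is 0%' / 'completed successfully' pairs in the log."""
        while True:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # CopyVirtualDisk_Task above ends on this branch, with
                # the InvalidArgument fault translated and re-raised.
                raise RuntimeError(f"task {task_ref} failed")
            print(f"Task {task_ref}: progress is {info['progress']}%")
            time.sleep(poll_interval)

    print(wait_for_task('task-4891772'))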
[ 1826.978647] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f99a87ea-5758-47fe-b077-9d8932dc523c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.980681] env[62346]: DEBUG nova.compute.claims [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1826.980889] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.981146] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.006428] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1827.066850] env[62346]: DEBUG oslo_vmware.rw_handles [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1827.126782] env[62346]: DEBUG oslo_vmware.rw_handles [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1827.126967] env[62346]: DEBUG oslo_vmware.rw_handles [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1827.227227] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b293e6d-953e-489f-83c1-af2a5fd15b9d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.237342] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc18823c-f947-45cb-8378-6111260fe45b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.268016] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403aabbb-c7a3-4178-a8d9-54c31ea46692 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.276349] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60e007a-c028-4685-9023-1a2a8c285738 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.291310] env[62346]: DEBUG nova.compute.provider_tree [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1827.301276] env[62346]: DEBUG nova.scheduler.client.report [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1827.318837] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.338s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.319403] env[62346]: ERROR nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1827.319403] env[62346]: Faults: ['InvalidArgument'] [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Traceback (most recent call last): [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self.driver.spawn(context, instance, image_meta, [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self._fetch_image_if_missing(context, vi) [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] image_cache(vi, tmp_image_ds_loc) [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] vm_util.copy_virtual_disk( [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] session._wait_for_task(vmdk_copy_task) [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] return self.wait_for_task(task_ref) [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] return evt.wait() [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] result = hub.switch() [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] return self.greenlet.switch() [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] self.f(*self.args, **self.kw) [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: 
e9f8e137-98d4-48ef-b642-8cd9aff72f87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] raise exceptions.translate_fault(task_info.error) [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Faults: ['InvalidArgument'] [ 1827.319403] env[62346]: ERROR nova.compute.manager [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] [ 1827.320415] env[62346]: DEBUG nova.compute.utils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1827.321663] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Build of instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 was re-scheduled: A specified parameter was not correct: fileType [ 1827.321663] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1827.322039] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1827.322216] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1827.322388] env[62346]: DEBUG nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1827.322549] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1827.646669] env[62346]: DEBUG nova.network.neutron [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.664708] env[62346]: INFO nova.compute.manager [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Took 0.34 seconds to deallocate network for instance. [ 1827.771115] env[62346]: INFO nova.scheduler.client.report [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Deleted allocations for instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 [ 1827.820964] env[62346]: DEBUG oslo_concurrency.lockutils [None req-9dee7d13-736d-49b3-b4ee-966ea7d606cb tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.497s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.822239] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 388.114s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.822726] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Acquiring lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.822726] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.822954] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.825856] env[62346]: INFO nova.compute.manager [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Terminating instance [ 1827.827657] env[62346]: DEBUG nova.compute.manager [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1827.828187] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1827.828405] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4637a704-7db4-4475-9373-06966cd6ee40 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.837951] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe75e034-6a5c-435e-b025-ef9f9b95ce30 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.848947] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1827.873784] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9f8e137-98d4-48ef-b642-8cd9aff72f87 could not be found. 
[ 1827.874000] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1827.874200] env[62346]: INFO nova.compute.manager [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1827.874476] env[62346]: DEBUG oslo.service.loopingcall [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1827.874714] env[62346]: DEBUG nova.compute.manager [-] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1827.874809] env[62346]: DEBUG nova.network.neutron [-] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1827.907383] env[62346]: DEBUG nova.network.neutron [-] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.909578] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.909994] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.912933] env[62346]: INFO nova.compute.claims [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1827.917011] env[62346]: INFO nova.compute.manager [-] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] Took 0.04 seconds to deallocate network for instance. 
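
The instance_claim above succeeds against the provider inventory reported a few records below (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 200 with max_unit 96). Placement-style capacity arithmetic is (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation; a small sketch over the dict exactly as logged:

    # Inventory as reported for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c.
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Schedulable capacity per resource class:
        # (total - reserved) * allocation_ratio.
        return {rc: (f['total'] - f['reserved']) * f['allocation_ratio']
                for rc, f in inv.items()}

    print(capacity(INVENTORY))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
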
[ 1828.022181] env[62346]: DEBUG oslo_concurrency.lockutils [None req-b0b60e77-e47d-470b-bf54-76851ec0d040 tempest-ListServersNegativeTestJSON-2032885685 tempest-ListServersNegativeTestJSON-2032885685-project-member] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.200s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.023111] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 83.335s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.023304] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9f8e137-98d4-48ef-b642-8cd9aff72f87] During sync_power_state the instance has a pending task (deleting). Skip. [ 1828.023479] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "e9f8e137-98d4-48ef-b642-8cd9aff72f87" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.106972] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8917724e-dd62-4f83-ab30-42fe0e490721 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.115433] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9890fd38-d0a0-4482-bf57-1adc96559a17 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.145975] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804e23ad-c1b5-4ad0-9ad1-cc7210c2b8e0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.153485] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539331a5-820d-4772-a53e-367720169a66 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.167372] env[62346]: DEBUG nova.compute.provider_tree [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.177956] env[62346]: DEBUG nova.scheduler.client.report [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1828.191073] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.191568] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1828.236332] env[62346]: DEBUG nova.compute.utils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1828.238044] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1828.238044] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1828.250515] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Start building block device mappings for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1828.306658] env[62346]: DEBUG nova.policy [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25d1165c3195422fa8187597c407a96d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73789014fd6240a893858419fd97d5b2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1828.327343] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1828.353560] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1828.353808] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1828.353967] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1828.354163] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1828.354308] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Image pref 0:0:0 {{(pid=62346) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1828.354453] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1828.354700] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1828.354821] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1828.354990] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1828.355170] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1828.355344] env[62346]: DEBUG nova.virt.hardware [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1828.356258] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9d891a-19e7-4430-b10c-0ed0ba67c1bb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.365857] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95914e21-b197-40e7-b7be-7fb82f56d396 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.641694] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Successfully created port: cb8b88d7-b285-448b-af65-15fb6aec8431 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1829.446664] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Successfully updated port: 
cb8b88d7-b285-448b-af65-15fb6aec8431 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1829.458548] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "refresh_cache-e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.458548] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquired lock "refresh_cache-e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.458548] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1829.523034] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1829.723276] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Updating instance_info_cache with network_info: [{"id": "cb8b88d7-b285-448b-af65-15fb6aec8431", "address": "fa:16:3e:6e:7e:c2", "network": {"id": "fd9d92ae-fa33-48d8-a777-cc9c911ee23b", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-956128633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73789014fd6240a893858419fd97d5b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8b88d7-b2", "ovs_interfaceid": "cb8b88d7-b285-448b-af65-15fb6aec8431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.735933] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Releasing lock 
"refresh_cache-e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.736258] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Instance network_info: |[{"id": "cb8b88d7-b285-448b-af65-15fb6aec8431", "address": "fa:16:3e:6e:7e:c2", "network": {"id": "fd9d92ae-fa33-48d8-a777-cc9c911ee23b", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-956128633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73789014fd6240a893858419fd97d5b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8b88d7-b2", "ovs_interfaceid": "cb8b88d7-b285-448b-af65-15fb6aec8431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1829.736672] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:7e:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb8b88d7-b285-448b-af65-15fb6aec8431', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1829.744173] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Creating folder: Project (73789014fd6240a893858419fd97d5b2). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1829.744721] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48b68fb2-7409-4b7d-a4bc-88086f12d1f1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.756889] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Created folder: Project (73789014fd6240a893858419fd97d5b2) in parent group-v953204. 
[ 1829.757115] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Creating folder: Instances. Parent ref: group-v953313. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1829.757362] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d2edea6-5415-4b8a-8cb6-f05cbf910575 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.768343] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Created folder: Instances in parent group-v953313. [ 1829.768607] env[62346]: DEBUG oslo.service.loopingcall [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1829.768803] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1829.769028] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04272136-dd53-4e0a-a379-b20b9e0fca9c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.789150] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1829.789150] env[62346]: value = "task-4891778" [ 1829.789150] env[62346]: _type = "Task" [ 1829.789150] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.797606] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891778, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.808530] env[62346]: DEBUG nova.compute.manager [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Received event network-vif-plugged-cb8b88d7-b285-448b-af65-15fb6aec8431 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1829.808787] env[62346]: DEBUG oslo_concurrency.lockutils [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] Acquiring lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.808948] env[62346]: DEBUG oslo_concurrency.lockutils [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] Lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.809127] env[62346]: DEBUG oslo_concurrency.lockutils [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] Lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.809292] env[62346]: DEBUG nova.compute.manager [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] No waiting events found dispatching network-vif-plugged-cb8b88d7-b285-448b-af65-15fb6aec8431 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1829.809454] env[62346]: WARNING nova.compute.manager [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Received unexpected event network-vif-plugged-cb8b88d7-b285-448b-af65-15fb6aec8431 for instance with vm_state building and task_state spawning. [ 1829.809610] env[62346]: DEBUG nova.compute.manager [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Received event network-changed-cb8b88d7-b285-448b-af65-15fb6aec8431 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1829.809836] env[62346]: DEBUG nova.compute.manager [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Refreshing instance network info cache due to event network-changed-cb8b88d7-b285-448b-af65-15fb6aec8431. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1829.810065] env[62346]: DEBUG oslo_concurrency.lockutils [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] Acquiring lock "refresh_cache-e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.810209] env[62346]: DEBUG oslo_concurrency.lockutils [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] Acquired lock "refresh_cache-e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.810366] env[62346]: DEBUG nova.network.neutron [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Refreshing network info cache for port cb8b88d7-b285-448b-af65-15fb6aec8431 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1830.163931] env[62346]: DEBUG nova.network.neutron [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Updated VIF entry in instance network info cache for port cb8b88d7-b285-448b-af65-15fb6aec8431. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1830.164299] env[62346]: DEBUG nova.network.neutron [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Updating instance_info_cache with network_info: [{"id": "cb8b88d7-b285-448b-af65-15fb6aec8431", "address": "fa:16:3e:6e:7e:c2", "network": {"id": "fd9d92ae-fa33-48d8-a777-cc9c911ee23b", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-956128633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73789014fd6240a893858419fd97d5b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8b88d7-b2", "ovs_interfaceid": "cb8b88d7-b285-448b-af65-15fb6aec8431", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.183304] env[62346]: DEBUG oslo_concurrency.lockutils [req-61fca562-3298-4795-8db4-225106ba3c15 req-8993d12d-b665-40f2-8788-a28fe1fb0b81 service nova] Releasing lock "refresh_cache-e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.300781] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891778, 'name': CreateVM_Task, 'duration_secs': 0.304232} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.300954] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1830.308546] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.308735] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.309111] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1830.309412] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a3c7d27-708f-46d3-9303-5adeda7e514a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.315672] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Waiting for the task: (returnval){ [ 1830.315672] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52cb1d8e-7663-b60e-185c-a57feb43f42a" [ 1830.315672] env[62346]: _type = "Task" [ 1830.315672] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.325427] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52cb1d8e-7663-b60e-185c-a57feb43f42a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.828277] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.828719] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1830.828772] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.216195] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.219852] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.231496] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.231724] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.231891] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.232066] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1832.233222] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cb057c-fe3e-4fdc-93b5-174e37ceeb26 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.243396] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5792e3-edb5-4e1c-96d8-27303a4ffd2c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.258283] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977044b5-7e13-4cf5-8410-ec96ec837925 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.266123] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06b4543-5c91-4d11-883a-d2a72546006a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.297311] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180594MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1832.297513] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.297636] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.382588] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.382745] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.382870] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.382991] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.383124] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.383240] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.383357] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.383469] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.383581] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.383691] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.383887] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1832.384049] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '80', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '6', 'num_os_type_None': '10', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '2', 'io_workload': '10', 'num_proj_7d571ab102004368b9265ca62b137356': '1', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '2', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_task_spawning': '4', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1', 'num_proj_73789014fd6240a893858419fd97d5b2': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1832.530945] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e17f6fb-37b3-41b9-a950-b1d34e941eb1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.539984] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bd9516-c3fb-48a3-883e-1c4f5814ae91 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.573355] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1190e3b-f0a5-4d4b-97fa-bb6542c0aba3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.581605] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bba760a-0625-46b3-931e-fcf8a6a57989 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.595633] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1832.605659] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1832.627731] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record 
updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1832.627731] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.330s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.627663] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.628087] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1833.628503] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1833.650047] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.650263] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.650368] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.650497] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.650622] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.650744] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.650888] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.651035] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.651159] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.651276] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1833.651395] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1834.220066] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.220295] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.220554] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.219793] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.220134] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1839.220172] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1844.215553] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1845.413898] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.414206] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.220178] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1875.857090] env[62346]: WARNING oslo_vmware.rw_handles [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1875.857090] env[62346]: ERROR oslo_vmware.rw_handles [ 1875.857975] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a 
tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1875.859496] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1875.859737] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Copying Virtual Disk [datastore2] vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/d7614d28-3db2-4130-b0ea-4c0ecb954c97/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1875.860027] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bee6271d-bbb0-4033-9dbc-7e270e4fa391 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.868803] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1875.868803] env[62346]: value = "task-4891779" [ 1875.868803] env[62346]: _type = "Task" [ 1875.868803] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.877382] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891779, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.380066] env[62346]: DEBUG oslo_vmware.exceptions [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1876.380368] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.380929] env[62346]: ERROR nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1876.380929] env[62346]: Faults: ['InvalidArgument'] [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Traceback (most recent call last): [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] yield resources [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self.driver.spawn(context, instance, image_meta, [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self._fetch_image_if_missing(context, vi) [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] image_cache(vi, tmp_image_ds_loc) [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] vm_util.copy_virtual_disk( [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] session._wait_for_task(vmdk_copy_task) [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] return self.wait_for_task(task_ref) [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] return evt.wait() [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] result = hub.switch() [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] return self.greenlet.switch() [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self.f(*self.args, **self.kw) [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] raise exceptions.translate_fault(task_info.error) [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Faults: ['InvalidArgument'] [ 1876.380929] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] [ 1876.382154] env[62346]: INFO nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Terminating instance [ 1876.382858] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.383081] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1876.383321] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81f97ea4-6506-4e5c-9550-9ad09e64a64e 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.387830] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1876.388034] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1876.388765] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984a47b3-4cdb-485f-b14a-2feab5749800 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.396204] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1876.396445] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6753ee82-9cb7-42a9-af3d-efbc2ea92c25 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.398834] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1876.399029] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1876.399999] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74ee5960-8eff-4f89-aacd-5f579f95da52 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.405341] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Waiting for the task: (returnval){ [ 1876.405341] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520ff20a-10c6-b0a8-f97d-93b8dbd0ad2d" [ 1876.405341] env[62346]: _type = "Task" [ 1876.405341] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.412667] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]520ff20a-10c6-b0a8-f97d-93b8dbd0ad2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.565683] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1876.565946] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1876.566183] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleting the datastore file [datastore2] f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1876.566467] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb22a556-ea91-4f4b-b2d2-aaba7f8b7fbe {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.573128] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 1876.573128] env[62346]: value = "task-4891781" [ 1876.573128] env[62346]: _type = "Task" [ 1876.573128] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.581948] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891781, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.916949] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1876.917381] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Creating directory with path [datastore2] vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1876.917629] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eaaaebc-d357-4355-8aae-f6374c0459af {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.930915] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Created directory with path [datastore2] vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1876.931360] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Fetch image to [datastore2] vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1876.931360] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1876.932095] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0dd2ec9-c87c-4445-b777-98b20c4af939 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.940583] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb02307-df88-4f52-a8f8-422936417ed4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.951717] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3c0452-e5a2-488c-a2d7-63673f1947b8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.983973] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7907052c-9c9b-40ec-9279-6e51c0511e28 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.991547] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cbb6a7e8-c26e-45a5-b426-94bee707f624 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.016427] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1877.072021] env[62346]: DEBUG oslo_vmware.rw_handles [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1877.134633] env[62346]: DEBUG oslo_vmware.api [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07336} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.136079] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1877.136282] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1877.136461] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1877.136637] env[62346]: INFO nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Took 0.75 seconds to destroy the instance on the hypervisor. [ 1877.138495] env[62346]: DEBUG oslo_vmware.rw_handles [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Completed reading data from the image iterator. 
{{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1877.138664] env[62346]: DEBUG oslo_vmware.rw_handles [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1877.139642] env[62346]: DEBUG nova.compute.claims [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1877.139817] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.140044] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.354699] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21a4166-9e53-4107-a0db-2d84debfa29b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.362940] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafdfe11-3a30-445c-97de-25ec54b63c83 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.394998] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43175ea-7eee-4f32-af8b-8bc75c4ca5c9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.403078] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adab2ed6-f9a8-4533-89cc-7896cd4d05dc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.417487] env[62346]: DEBUG nova.compute.provider_tree [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.426423] env[62346]: DEBUG nova.scheduler.client.report [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 
50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1877.442115] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.302s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.442612] env[62346]: ERROR nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1877.442612] env[62346]: Faults: ['InvalidArgument'] [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Traceback (most recent call last): [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self.driver.spawn(context, instance, image_meta, [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self._fetch_image_if_missing(context, vi) [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] image_cache(vi, tmp_image_ds_loc) [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] vm_util.copy_virtual_disk( [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] session._wait_for_task(vmdk_copy_task) [ 1877.442612] env[62346]: ERROR nova.compute.manager 
[instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] return self.wait_for_task(task_ref) [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] return evt.wait() [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] result = hub.switch() [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] return self.greenlet.switch() [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] self.f(*self.args, **self.kw) [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] raise exceptions.translate_fault(task_info.error) [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Faults: ['InvalidArgument'] [ 1877.442612] env[62346]: ERROR nova.compute.manager [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] [ 1877.443634] env[62346]: DEBUG nova.compute.utils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1877.444958] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Build of instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 was re-scheduled: A specified parameter was not correct: fileType [ 1877.444958] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1877.445352] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Unplugging VIFs for instance {{(pid=62346) 
_cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1877.445526] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1877.445695] env[62346]: DEBUG nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1877.445855] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1877.972815] env[62346]: DEBUG nova.network.neutron [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.989034] env[62346]: INFO nova.compute.manager [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Took 0.54 seconds to deallocate network for instance.
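The traceback above shows the shape of the task handling that produced this failure: wait_for_task blocks on a looping call that repeatedly polls the vCenter task (the "progress is 0%" DEBUG lines), and once the task reports an error, _poll_task raises a translated fault, which surfaces as the VimFaultException with Faults: ['InvalidArgument']. The following is a minimal, self-contained sketch of that pattern only; TaskInfo, wait_for_task, and the VimFaultException class below are illustrative stand-ins, not oslo.vmware's actual code or API.

```python
# Illustrative sketch of the poll-until-done pattern visible in the traceback.
# All names here are hypothetical stand-ins for the oslo.vmware internals.
import itertools
import time
from dataclasses import dataclass, field

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(f"{message} Faults: {fault_list}")
        self.fault_list = fault_list

@dataclass
class TaskInfo:          # hypothetical mirror of a vim TaskInfo snapshot
    state: str           # 'running' | 'success' | 'error'
    progress: int = 0
    error: dict = field(default_factory=dict)

def wait_for_task(poll, interval=0.1):
    """Poll `poll()` (returns TaskInfo) until the task finishes or errors."""
    while True:
        info = poll()
        if info.state == "success":
            return info
        if info.state == "error":
            # Mirrors `raise exceptions.translate_fault(task_info.error)`.
            raise VimFaultException([info.error["fault"]], info.error["msg"])
        # Corresponds to the "progress is N%" DEBUG lines in the log.
        print(f"progress is {info.progress}%")
        time.sleep(interval)

# Demo: a CopyVirtualDisk-style task that fails the way the log shows.
states = itertools.chain(
    [TaskInfo("running", 0)],
    [TaskInfo("error", error={"fault": "InvalidArgument",
                              "msg": "A specified parameter was not correct: fileType."})],
)
try:
    wait_for_task(lambda: next(states))
except VimFaultException as exc:
    print(exc)
```

Because the fault is raised out of the spawn path, the compute manager aborts the claim, deallocates networking, and re-schedules the build, which is exactly the sequence the surrounding records trace.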
[ 1878.114339] env[62346]: INFO nova.scheduler.client.report [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted allocations for instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 [ 1878.138583] env[62346]: DEBUG oslo_concurrency.lockutils [None req-04ac64ef-1026-46c3-8607-64cfebcd6f5a tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.154s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.140059] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.306s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.141019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.141019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.141019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.143408] env[62346]: INFO nova.compute.manager [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Terminating instance [ 1878.147618] env[62346]: DEBUG nova.compute.manager [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1878.149018] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1878.149018] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79ee16fe-bbf7-4825-97d2-655c915c87b1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.158831] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146edfd4-d95d-4c29-ab74-c338e9d00fc9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.172389] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1878.196762] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f2d9789b-5250-4b2b-9046-d4cb8e67b8b1 could not be found. [ 1878.197056] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1878.197257] env[62346]: INFO nova.compute.manager [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1878.197509] env[62346]: DEBUG oslo.service.loopingcall [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.197749] env[62346]: DEBUG nova.compute.manager [-] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1878.197844] env[62346]: DEBUG nova.network.neutron [-] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1878.224970] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.225254] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.226832] env[62346]: INFO nova.compute.claims [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1878.229959] env[62346]: DEBUG nova.network.neutron [-] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.238473] env[62346]: INFO nova.compute.manager [-] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] Took 0.04 seconds to deallocate network for instance. [ 1878.347322] env[62346]: DEBUG oslo_concurrency.lockutils [None req-5e848258-cb1c-47fe-9223-fd793facfd67 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.207s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.348194] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 133.659s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.348411] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f2d9789b-5250-4b2b-9046-d4cb8e67b8b1] During sync_power_state the instance has a pending task (deleting). Skip. 
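The lockutils records bracketing this section all follow one pattern: a named lock is acquired inside a wrapper that logs how long the caller waited, and the matching release record logs how long the lock was held. A minimal sketch of that pattern is below; `timed_lock` is a hypothetical stand-in written for illustration, while the real mechanism in these logs is oslo_concurrency.lockutils.

```python
# Illustrative sketch of the 'acquired ... waited Ns' / '"released" ... held Ns'
# pattern in the surrounding records. `timed_lock` is hypothetical, not the
# actual oslo_concurrency.lockutils implementation.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str, by: str):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()                      # wait time covers lock contention
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1    # held time covers the critical section
        lock.release()
        print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

# Usage mirroring the per-instance build lock seen above:
with timed_lock("f72ad8bf-f599-4e5d-8ccc-2f7de9becb89",
                "_locked_do_build_and_run_instance"):
    time.sleep(0.01)   # stand-in for the build work
```

The waited/held timings are what make the long-running builds visible in this log, e.g. the build lock held for 583.154s before the terminate path could acquire it.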
[ 1878.348596] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "f2d9789b-5250-4b2b-9046-d4cb8e67b8b1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.423490] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b7728c-8ef0-4b76-b380-b0e9687b25f8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.432538] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1a281c-54df-46d0-8336-8cd9bed061f9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.464932] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb37daaa-ec3d-4ced-9e80-40f4ef030e01 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.473183] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ff2fee-af4a-44fe-8b45-67e6bb1ebaca {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.486953] env[62346]: DEBUG nova.compute.provider_tree [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1878.497460] env[62346]: DEBUG nova.scheduler.client.report [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1878.514614] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.289s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.515143] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1878.550147] env[62346]: DEBUG nova.compute.utils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1878.551988] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1878.551988] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1878.561332] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1878.615721] env[62346]: DEBUG nova.policy [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5aedaeeddb74c168d6651c8661f707e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3387b28c6e4b6d80fbe6cb9f955fe1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1878.630475] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1878.658954] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1878.659228] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1878.659391] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1878.659578] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1878.659723] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1878.659871] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1878.660091] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1878.660262] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1878.660485] env[62346]: DEBUG nova.virt.hardware [None 
req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1878.660663] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1878.660840] env[62346]: DEBUG nova.virt.hardware [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1878.661722] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a8ac18-963f-4c71-93d3-bf8c15defb87 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.671675] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420e1853-0056-40a0-976d-f4b4191ff3e6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.978035] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Successfully created port: 9911ff50-aa0f-4142-98d1-cac57b179380 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1879.816944] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Successfully updated port: 9911ff50-aa0f-4142-98d1-cac57b179380 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1879.855189] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "refresh_cache-f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.855189] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquired lock "refresh_cache-f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.855189] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1879.896225] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 
tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1880.013510] env[62346]: DEBUG nova.compute.manager [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Received event network-vif-plugged-9911ff50-aa0f-4142-98d1-cac57b179380 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1880.013859] env[62346]: DEBUG oslo_concurrency.lockutils [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] Acquiring lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.014538] env[62346]: DEBUG oslo_concurrency.lockutils [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.014740] env[62346]: DEBUG oslo_concurrency.lockutils [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.014917] env[62346]: DEBUG nova.compute.manager [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] No waiting events found dispatching network-vif-plugged-9911ff50-aa0f-4142-98d1-cac57b179380 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1880.015101] env[62346]: WARNING nova.compute.manager [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Received unexpected event network-vif-plugged-9911ff50-aa0f-4142-98d1-cac57b179380 for instance with vm_state building and task_state spawning. [ 1880.015265] env[62346]: DEBUG nova.compute.manager [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Received event network-changed-9911ff50-aa0f-4142-98d1-cac57b179380 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1880.015419] env[62346]: DEBUG nova.compute.manager [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Refreshing instance network info cache due to event network-changed-9911ff50-aa0f-4142-98d1-cac57b179380. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1880.015588] env[62346]: DEBUG oslo_concurrency.lockutils [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] Acquiring lock "refresh_cache-f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.087610] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Updating instance_info_cache with network_info: [{"id": "9911ff50-aa0f-4142-98d1-cac57b179380", "address": "fa:16:3e:9d:47:5a", "network": {"id": "01531b0c-f862-4d8a-8c83-5176960a2cf5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-53771671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3387b28c6e4b6d80fbe6cb9f955fe1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9911ff50-aa", "ovs_interfaceid": "9911ff50-aa0f-4142-98d1-cac57b179380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.107379] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Releasing lock "refresh_cache-f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.107773] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Instance network_info: |[{"id": "9911ff50-aa0f-4142-98d1-cac57b179380", "address": "fa:16:3e:9d:47:5a", "network": {"id": "01531b0c-f862-4d8a-8c83-5176960a2cf5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-53771671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3387b28c6e4b6d80fbe6cb9f955fe1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap9911ff50-aa", "ovs_interfaceid": "9911ff50-aa0f-4142-98d1-cac57b179380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1880.108194] env[62346]: DEBUG oslo_concurrency.lockutils [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] Acquired lock "refresh_cache-f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.108428] env[62346]: DEBUG nova.network.neutron [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Refreshing network info cache for port 9911ff50-aa0f-4142-98d1-cac57b179380 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1880.109995] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:47:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e55c248-c504-4c7a-bbe9-f42cf417aee7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9911ff50-aa0f-4142-98d1-cac57b179380', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1880.118252] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Creating folder: Project (fc3387b28c6e4b6d80fbe6cb9f955fe1). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1880.122965] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8f44f90-3e37-493a-b0be-df826c779a17 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.135620] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Created folder: Project (fc3387b28c6e4b6d80fbe6cb9f955fe1) in parent group-v953204. [ 1880.135825] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Creating folder: Instances. Parent ref: group-v953316. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1880.136104] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cd8595a-093a-45e6-8c81-0f2801caddc6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.146319] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Created folder: Instances in parent group-v953316. 
[ 1880.146530] env[62346]: DEBUG oslo.service.loopingcall [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.146742] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1880.146950] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6fb9c11b-567d-40f8-9027-08d2521baf39 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.167667] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1880.167667] env[62346]: value = "task-4891784" [ 1880.167667] env[62346]: _type = "Task" [ 1880.167667] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.178531] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891784, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.506359] env[62346]: DEBUG nova.network.neutron [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Updated VIF entry in instance network info cache for port 9911ff50-aa0f-4142-98d1-cac57b179380. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1880.506746] env[62346]: DEBUG nova.network.neutron [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Updating instance_info_cache with network_info: [{"id": "9911ff50-aa0f-4142-98d1-cac57b179380", "address": "fa:16:3e:9d:47:5a", "network": {"id": "01531b0c-f862-4d8a-8c83-5176960a2cf5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-53771671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3387b28c6e4b6d80fbe6cb9f955fe1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9911ff50-aa", "ovs_interfaceid": "9911ff50-aa0f-4142-98d1-cac57b179380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.516989] env[62346]: DEBUG oslo_concurrency.lockutils [req-867bfa01-e3ef-414c-9afe-d0fb9c831755 req-61e6e6b8-de5b-4735-adbd-aece4cd4ba6b service nova] Releasing lock "refresh_cache-f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" {{(pid=62346) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.678492] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891784, 'name': CreateVM_Task, 'duration_secs': 0.328194} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.678492] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1880.679146] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.679314] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.679643] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1880.679896] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6a413f8-30d1-4f36-bbee-255236d70351 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.685229] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Waiting for the task: (returnval){ [ 1880.685229] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52fa7907-3d28-d90e-aff8-3ccf92e23081" [ 1880.685229] env[62346]: _type = "Task" [ 1880.685229] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.699317] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.699553] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1880.699758] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.736409] env[62346]: DEBUG oslo_concurrency.lockutils [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "052de992-f28b-4c25-bfbe-3517665f1902" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.218160] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.218646] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.220557] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1892.233700] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.233930] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.234142] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.234291] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1892.235421] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d4df78-f3e8-4470-84c7-872aa2399373 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.244926] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b3b9e6-b73d-462c-ac54-f31d3328439a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.262743] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b608966-e1a6-4834-9447-c140703b6f0f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.270162] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955f6b40-3ce0-4781-9812-f5d2722f736a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.299902] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180566MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1892.300090] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.300277] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.375107] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.375312] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.375445] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.375568] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.375687] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.375814] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.376013] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.376061] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.376162] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.376266] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.388445] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1892.388694] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1892.388864] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '81', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '6', 'num_os_type_None': '10', 'num_proj_7d571ab102004368b9265ca62b137356': '1', 'io_workload': '10', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '2', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_task_spawning': '4', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1892.537969] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f852ed7-7e27-4ec6-a972-40313614b169 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.545854] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95d6e3a-61dc-4ee9-81b5-52efc84ed997 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.576528] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceccf50-84a5-48bd-a8bb-898ed8228f26 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.584578] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6fb7f9-0784-4db9-8794-e635dc15f322 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.598260] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in 
ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.607560] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.622297] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1892.622477] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.322s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.619075] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.619075] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.619075] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1894.619075] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1894.639368] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.639557] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.639683] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.639758] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.639883] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.640012] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.640140] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.640261] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.640377] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.640494] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1894.640649] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1895.220024] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1896.219632] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1898.220029] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.220383] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.220840] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.220840] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1909.221134] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1926.841319] env[62346]: WARNING oslo_vmware.rw_handles [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed 
connection without response [ 1926.841319] env[62346]: ERROR oslo_vmware.rw_handles [ 1926.842146] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1926.843857] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1926.844128] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Copying Virtual Disk [datastore2] vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/824fc98a-2975-47c5-a73c-1b2cf9c48ccb/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1926.844415] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be81b3e0-c97f-4dc1-9a14-d56168bff844 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.852429] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Waiting for the task: (returnval){ [ 1926.852429] env[62346]: value = "task-4891785" [ 1926.852429] env[62346]: _type = "Task" [ 1926.852429] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.860928] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Task: {'id': task-4891785, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.362233] env[62346]: DEBUG oslo_vmware.exceptions [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1927.362529] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.363118] env[62346]: ERROR nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1927.363118] env[62346]: Faults: ['InvalidArgument'] [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Traceback (most recent call last): [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] yield resources [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self.driver.spawn(context, instance, image_meta, [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self._fetch_image_if_missing(context, vi) [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] image_cache(vi, tmp_image_ds_loc) [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] vm_util.copy_virtual_disk( [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] session._wait_for_task(vmdk_copy_task) [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] return self.wait_for_task(task_ref) [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] return evt.wait() [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] result = hub.switch() [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] return self.greenlet.switch() [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self.f(*self.args, **self.kw) [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] raise exceptions.translate_fault(task_info.error) [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Faults: ['InvalidArgument'] [ 1927.363118] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] [ 1927.364232] env[62346]: INFO nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Terminating instance [ 1927.364980] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.366212] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1927.366879] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 
tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1927.367083] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1927.367319] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56828950-a71c-4e99-9e64-9178fc000086 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.369923] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c54a3e-a398-43c5-8198-b03f8478327c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.376966] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1927.377251] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8dbd7be2-ab5e-4176-8992-b121c924ebdd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.379532] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1927.379705] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1927.380665] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2f0eeac-9fdf-4492-b9e8-00eeca80d7f3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.385904] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 1927.385904] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5220e464-3d3d-50f5-6a0c-23ac06a3cecc" [ 1927.385904] env[62346]: _type = "Task" [ 1927.385904] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.394348] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5220e464-3d3d-50f5-6a0c-23ac06a3cecc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.449794] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1927.450147] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1927.450356] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Deleting the datastore file [datastore2] 8979ed84-fa1d-49a1-9f00-844d0b0f604a {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1927.450619] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4533401d-643d-48dd-8be5-e687878c30f7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.457154] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Waiting for the task: (returnval){ [ 1927.457154] env[62346]: value = "task-4891787" [ 1927.457154] env[62346]: _type = "Task" [ 1927.457154] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.896916] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1927.897305] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating directory with path [datastore2] vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1927.897430] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8386fe8e-5abd-4f33-887d-0fd637c8249d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.909798] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created directory with path [datastore2] vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1927.910043] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Fetch image to [datastore2] vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1927.910196] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1927.910993] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef92d46-5bef-4354-a826-75e4218a3283 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.918524] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0c97ed-a023-4b97-8691-e35578609b44 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.928083] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4da5f8-530c-4984-9e99-6abceb9ccc17 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.961660] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0cd785-afeb-4b88-aa06-f25b3807e7c1 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.971083] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eacbdfb4-0379-494c-957b-449a824eefa5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.973009] env[62346]: DEBUG oslo_vmware.api [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Task: {'id': task-4891787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073834} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.973266] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1927.973450] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1927.973623] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1927.973801] env[62346]: INFO nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Took 0.61 seconds to destroy the instance on the hypervisor. 
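
The records above bracket oslo.vmware's task pattern: FileManager.DeleteDatastoreFile_Task returns a Task moref, and wait_for_task/_poll_task then poll it until it reports success or a fault. A minimal, self-contained sketch of that poll loop follows; get_task_info, TaskFault, and the dict shape are illustrative assumptions, not oslo.vmware's real API.

import time

class TaskFault(Exception):
    """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    # Poll a vCenter-style task until it reaches a terminal state.
    # get_task_info is an assumed callable returning a dict such as
    # {'state': 'queued'|'running'|'success'|'error', 'error': str or None}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info['state'] == 'success':
            return info                      # terminal: hand result back to caller
        if info['state'] == 'error':
            raise TaskFault(info['error'])   # surfaces faults like InvalidArgument
        time.sleep(interval)                 # still queued/running: poll again
    raise TimeoutError('task did not reach a terminal state in time')

# usage sketch:
# states = iter(['running', 'success'])
# wait_for_task(lambda: {'state': next(states), 'error': None})
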
[ 1927.975982] env[62346]: DEBUG nova.compute.claims [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1927.976176] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.976391] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.011697] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1928.075297] env[62346]: DEBUG oslo_vmware.rw_handles [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1928.165955] env[62346]: DEBUG oslo_vmware.rw_handles [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1928.166308] env[62346]: DEBUG oslo_vmware.rw_handles [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1928.270474] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecff147-a05b-40ec-ac91-19788640c6e6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.279297] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2021a963-ae77-4f64-880f-4b468f7f6825 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.312972] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f503cc1-fdff-440d-b6af-034606adb14e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.321983] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6645c71d-0953-4da8-a260-d9977e2a0351 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.338394] env[62346]: DEBUG nova.compute.provider_tree [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1928.350328] env[62346]: DEBUG nova.scheduler.client.report [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1928.366869] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.390s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.367458] env[62346]: ERROR nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1928.367458] env[62346]: Faults: ['InvalidArgument'] [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Traceback (most recent call last): [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1928.367458] 
env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self.driver.spawn(context, instance, image_meta, [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self._fetch_image_if_missing(context, vi) [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] image_cache(vi, tmp_image_ds_loc) [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] vm_util.copy_virtual_disk( [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] session._wait_for_task(vmdk_copy_task) [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] return self.wait_for_task(task_ref) [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] return evt.wait() [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] result = hub.switch() [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] return self.greenlet.switch() [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] self.f(*self.args, **self.kw) [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] raise exceptions.translate_fault(task_info.error) [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Faults: ['InvalidArgument'] [ 1928.367458] env[62346]: ERROR nova.compute.manager [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] [ 1928.368858] env[62346]: DEBUG nova.compute.utils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1928.369982] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Build of instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a was re-scheduled: A specified parameter was not correct: fileType [ 1928.369982] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1928.370407] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1928.370598] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1928.370787] env[62346]: DEBUG nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1928.371011] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1928.876709] env[62346]: DEBUG nova.network.neutron [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.888633] env[62346]: INFO nova.compute.manager [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Took 0.52 seconds to deallocate network for instance. [ 1929.015579] env[62346]: INFO nova.scheduler.client.report [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Deleted allocations for instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a [ 1929.042766] env[62346]: DEBUG oslo_concurrency.lockutils [None req-2b073759-60d4-41f0-a02f-89b964c336ee tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.934s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.045030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 388.660s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.045030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Acquiring lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.045030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.045030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.047052] env[62346]: INFO nova.compute.manager [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Terminating instance [ 1929.048942] env[62346]: DEBUG nova.compute.manager [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1929.049162] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1929.049835] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-687dc012-f3c0-49d6-9c7f-9dad646b5f38 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.062075] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e4ddbb-2cb6-42d1-b26b-fc636c6a0518 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.076092] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1929.103073] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8979ed84-fa1d-49a1-9f00-844d0b0f604a could not be found. 
[ 1929.103277] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1929.103481] env[62346]: INFO nova.compute.manager [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1929.103737] env[62346]: DEBUG oslo.service.loopingcall [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1929.103985] env[62346]: DEBUG nova.compute.manager [-] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1929.104098] env[62346]: DEBUG nova.network.neutron [-] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1929.129626] env[62346]: DEBUG nova.network.neutron [-] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.137708] env[62346]: INFO nova.compute.manager [-] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] Took 0.03 seconds to deallocate network for instance. 
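
"Waiting for function ..._deallocate_network_with_retries to return" above is a looping call driving a retried network teardown. A generic sketch of that retry shape follows; the function name, attempt count, and interval are illustrative assumptions, not oslo.service's actual interface.

import time

def call_with_retries(func, attempts=3, interval=1.0,
                      retry_on=(Exception,), log=print):
    # Call func(); on a retryable failure, wait and try again,
    # giving up (re-raising) after the final attempt.
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except retry_on as exc:
            if attempt == attempts:
                raise
            log('attempt %d/%d failed (%s); retrying in %.1fs'
                % (attempt, attempts, exc, interval))
            time.sleep(interval)

# usage sketch (deallocate_network is a placeholder callable):
# call_with_retries(lambda: deallocate_network(instance), attempts=3)
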
[ 1929.138780] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.139053] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.140574] env[62346]: INFO nova.compute.claims [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1929.247914] env[62346]: DEBUG oslo_concurrency.lockutils [None req-66c01dfb-c304-431a-9f07-0e5e205c2367 tempest-ImagesOneServerTestJSON-1506493811 tempest-ImagesOneServerTestJSON-1506493811-project-member] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.204s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.248793] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 184.560s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.249074] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 8979ed84-fa1d-49a1-9f00-844d0b0f604a] During sync_power_state the instance has a pending task (deleting). Skip. 
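
The lockutils records above carry wait/hold accounting: the terminate request waited 388.660s to acquire the per-instance lock (the whole time the rescheduled build held it) and then held it only 0.204s itself. A small context manager reproduces that bookkeeping around an ordinary threading.Lock; the log format merely imitates lockutils and is not its real implementation.

import contextlib
import threading
import time

@contextlib.contextmanager
def timed_lock(lock, name, log=print):
    # Measure time spent waiting to acquire the lock, then time spent holding it.
    start = time.monotonic()
    with lock:
        log('Lock "%s" acquired :: waited %.3fs'
            % (name, time.monotonic() - start))
        held_from = time.monotonic()
        try:
            yield
        finally:
            log('Lock "%s" released :: held %.3fs'
                % (name, time.monotonic() - held_from))

# usage sketch:
# resources_lock = threading.Lock()
# with timed_lock(resources_lock, 'compute_resources'):
#     pass  # critical section, e.g. a resource-tracker claim
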
[ 1929.249259] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "8979ed84-fa1d-49a1-9f00-844d0b0f604a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.349038] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7202d554-a407-477a-b178-2571ed7ef1c2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.357718] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e20657-78d7-42ae-9391-7f8da7cde148 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.390644] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83923d5f-55e7-46fc-bfef-b4e9443a828f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.399760] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6746fb-f13e-499e-9db2-1bf3d23f8a3a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.414541] env[62346]: DEBUG nova.compute.provider_tree [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1929.426960] env[62346]: DEBUG nova.scheduler.client.report [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1929.443741] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.444266] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Start building networks asynchronously for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1929.484238] env[62346]: DEBUG nova.compute.utils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1929.486413] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1929.486676] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1929.498323] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1929.559257] env[62346]: DEBUG nova.policy [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9eb444699bfe4137a12b88f71543f185', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20adb521b1574b8581a0c368923e38eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 1929.566998] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1929.592787] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1929.593048] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1929.593210] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1929.593395] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1929.593542] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1929.593688] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1929.593895] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1929.594070] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1929.594243] env[62346]: DEBUG 
nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1929.594403] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1929.594574] env[62346]: DEBUG nova.virt.hardware [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1929.595458] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ef86c4-9c2c-49b0-865d-5ba6e0e71221 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.606017] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af2dbc5-d8a4-49eb-b5a7-fa6904ea89d4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.040025] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Successfully created port: 045f619b-fdee-4f1b-af16-9aeb4c22e666 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1930.738234] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Successfully updated port: 045f619b-fdee-4f1b-af16-9aeb4c22e666 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1930.749978] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "refresh_cache-fbaf20c0-294c-4e37-b0f4-ee432f00c911" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.749978] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "refresh_cache-fbaf20c0-294c-4e37-b0f4-ee432f00c911" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.750144] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1930.801336] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c 
tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1931.235858] env[62346]: DEBUG nova.compute.manager [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Received event network-vif-plugged-045f619b-fdee-4f1b-af16-9aeb4c22e666 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1931.236142] env[62346]: DEBUG oslo_concurrency.lockutils [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] Acquiring lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.236737] env[62346]: DEBUG oslo_concurrency.lockutils [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.236921] env[62346]: DEBUG oslo_concurrency.lockutils [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.241251] env[62346]: DEBUG nova.compute.manager [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] No waiting events found dispatching network-vif-plugged-045f619b-fdee-4f1b-af16-9aeb4c22e666 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1931.241512] env[62346]: WARNING nova.compute.manager [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Received unexpected event network-vif-plugged-045f619b-fdee-4f1b-af16-9aeb4c22e666 for instance with vm_state building and task_state spawning. [ 1931.241688] env[62346]: DEBUG nova.compute.manager [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Received event network-changed-045f619b-fdee-4f1b-af16-9aeb4c22e666 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1931.241851] env[62346]: DEBUG nova.compute.manager [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Refreshing instance network info cache due to event network-changed-045f619b-fdee-4f1b-af16-9aeb4c22e666. 
{{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1931.242055] env[62346]: DEBUG oslo_concurrency.lockutils [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] Acquiring lock "refresh_cache-fbaf20c0-294c-4e37-b0f4-ee432f00c911" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.264199] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Updating instance_info_cache with network_info: [{"id": "045f619b-fdee-4f1b-af16-9aeb4c22e666", "address": "fa:16:3e:4f:ef:19", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap045f619b-fd", "ovs_interfaceid": "045f619b-fdee-4f1b-af16-9aeb4c22e666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.277220] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "refresh_cache-fbaf20c0-294c-4e37-b0f4-ee432f00c911" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.277513] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Instance network_info: |[{"id": "045f619b-fdee-4f1b-af16-9aeb4c22e666", "address": "fa:16:3e:4f:ef:19", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap045f619b-fd", "ovs_interfaceid": "045f619b-fdee-4f1b-af16-9aeb4c22e666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1931.277830] env[62346]: DEBUG oslo_concurrency.lockutils [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] Acquired lock "refresh_cache-fbaf20c0-294c-4e37-b0f4-ee432f00c911" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.278036] env[62346]: DEBUG nova.network.neutron [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Refreshing network info cache for port 045f619b-fdee-4f1b-af16-9aeb4c22e666 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1931.279092] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:ef:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '045f619b-fdee-4f1b-af16-9aeb4c22e666', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1931.286575] env[62346]: DEBUG oslo.service.loopingcall [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.287453] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1931.289738] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20415ab6-42a8-4042-888c-e3e3dca393c4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.312508] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1931.312508] env[62346]: value = "task-4891788" [ 1931.312508] env[62346]: _type = "Task" [ 1931.312508] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.321740] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891788, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.632245] env[62346]: DEBUG nova.network.neutron [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Updated VIF entry in instance network info cache for port 045f619b-fdee-4f1b-af16-9aeb4c22e666. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1931.632721] env[62346]: DEBUG nova.network.neutron [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Updating instance_info_cache with network_info: [{"id": "045f619b-fdee-4f1b-af16-9aeb4c22e666", "address": "fa:16:3e:4f:ef:19", "network": {"id": "6c17c99b-320f-4665-9e5f-cb39d735297b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-46618442-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20adb521b1574b8581a0c368923e38eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap045f619b-fd", "ovs_interfaceid": "045f619b-fdee-4f1b-af16-9aeb4c22e666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.643089] env[62346]: DEBUG oslo_concurrency.lockutils [req-210938c2-2968-4b3d-8e69-21d1454717b8 req-36860616-1ff4-4dea-a1c6-be7515c3b87b service nova] Releasing lock "refresh_cache-fbaf20c0-294c-4e37-b0f4-ee432f00c911" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.823683] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891788, 'name': CreateVM_Task, 'duration_secs': 0.335534} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.823870] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1931.824477] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.824646] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.825034] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1931.825308] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f22da806-1002-4af7-bddb-a194fe631286 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.829793] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 1931.829793] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52527993-a1c8-5040-1192-a34c533fbf39" [ 1931.829793] env[62346]: _type = "Task" [ 1931.829793] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.837791] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52527993-a1c8-5040-1192-a34c533fbf39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.340838] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.341232] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1932.341290] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.190413] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.220509] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.232594] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.232810] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.232984] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.233188] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1952.234646] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50677fa1-da3f-4a8d-a142-a5afcf7f0293 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.243471] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d7b94c-dd5a-4ad9-a7ac-bcc96f00d599 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.257960] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6c54c6-04e4-4ef6-b6e3-2ece1dea9a55 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.264998] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0a3978-a796-44ba-ae27-50e7ba68e0a7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.295545] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180574MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1952.295714] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.295891] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.090521] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance daef9d9c-03a6-4ee8-9806-9d895f802776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.090521] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.090521] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.090521] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.090924] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.090924] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.090924] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.090924] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.091133] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.091133] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.091307] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1953.091466] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '82', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '6', 'num_os_type_None': '10', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '2', 'io_workload': '10', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '2', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1', 'num_task_spawning': '4', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1953.224530] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa56bd87-f5fc-4ea9-915d-20595e6a13ac {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.232435] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6dfea7-d984-4fbb-bf7b-a1351f6b4efd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.261889] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd672b0e-958d-41c1-8b80-f9ff513a9344 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.270017] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62f0b02-f412-4064-95df-2992deb4b0de {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.283484] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1953.292980] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1953.307317] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record 
updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1953.307506] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.012s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.303485] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.303913] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.303913] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1955.304034] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1955.324378] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.324535] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.324664] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.324790] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.324912] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.325042] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.325164] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.325290] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.325409] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.325524] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.325641] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1956.220106] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.219550] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.221121] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1960.220800] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.220100] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.220440] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1969.216760] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1970.220455] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.685570] env[62346]: WARNING oslo_vmware.rw_handles [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1977.685570] env[62346]: ERROR oslo_vmware.rw_handles [ 1977.686419] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1977.689120] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1977.689489] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Copying Virtual Disk [datastore2] vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/00dfa3ca-6f6d-4884-b37e-e2e2cbcad9cb/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1977.689874] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52bde440-9f65-4225-b064-a1494623c61c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.699797] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 1977.699797] env[62346]: value = "task-4891789" [ 1977.699797] env[62346]: _type = "Task" [ 1977.699797] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.712074] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': task-4891789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.211128] env[62346]: DEBUG oslo_vmware.exceptions [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1978.211498] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.212149] env[62346]: ERROR nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1978.212149] env[62346]: Faults: ['InvalidArgument'] [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Traceback (most recent call last): [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] yield resources [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self.driver.spawn(context, instance, image_meta, [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self._fetch_image_if_missing(context, vi) [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] image_cache(vi, tmp_image_ds_loc) [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] vm_util.copy_virtual_disk( [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] session._wait_for_task(vmdk_copy_task) [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] return self.wait_for_task(task_ref) [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] return evt.wait() [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] result = hub.switch() [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] return self.greenlet.switch() [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self.f(*self.args, **self.kw) [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] raise exceptions.translate_fault(task_info.error) [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1978.212149] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Faults: ['InvalidArgument'] [ 1978.212149] 
env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] [ 1978.213161] env[62346]: INFO nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Terminating instance [ 1978.215282] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.215503] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1978.216217] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1978.216415] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1978.216653] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc444be0-b510-4579-8b0b-ec78b3b23adf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.219259] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99848051-f1e2-4c35-bf06-54f806f2f1f5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.227769] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1978.229057] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b9e3ae5-9c6e-474a-8f91-18c364ab1bc2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.230771] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1978.230974] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1978.231806] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ed353f4-68ce-4160-bf29-2faa75f8e0c1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.238520] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Waiting for the task: (returnval){ [ 1978.238520] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52060795-8693-53ab-bcc0-db972727d4dc" [ 1978.238520] env[62346]: _type = "Task" [ 1978.238520] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.247865] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52060795-8693-53ab-bcc0-db972727d4dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.315343] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1978.315583] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1978.315762] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Deleting the datastore file [datastore2] daef9d9c-03a6-4ee8-9806-9d895f802776 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1978.316056] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afac97cb-8a61-4a7b-85da-76255a7a95b6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.324779] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 1978.324779] env[62346]: value = "task-4891791" [ 1978.324779] env[62346]: _type = "Task" [ 1978.324779] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.335126] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': task-4891791, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.749180] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1978.749632] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Creating directory with path [datastore2] vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1978.749767] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbb1b98e-78d8-4e20-aec2-6b81cac5b978 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.762681] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Created directory with path [datastore2] vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1978.762906] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Fetch image to [datastore2] vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1978.763076] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1978.763872] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbcf42a-5e59-42bc-861c-e8604bd7217f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.771575] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f4b60a-61f9-4704-b7b2-d32f391fe972 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.781423] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05628dbb-fa02-44a8-a606-9c92d306ef5a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.846635] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7212d80c-e2a3-4924-9525-88b9024aadd5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.861565] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1e390095-bdd5-4316-bb13-194784fb9a0e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.864616] env[62346]: DEBUG oslo_vmware.api [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': task-4891791, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070315} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.864994] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1978.865288] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1978.865542] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1978.865804] env[62346]: INFO nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Took 0.65 seconds to destroy the instance on the hypervisor. 
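
The task-4891789 and task-4891791 records above follow the library's standard task-polling pattern: the caller logs "Waiting for the task ... to complete", a looping call re-reads the task state until it leaves the queued/running phases (the "progress is 0%." records), and the loop either returns the result ("completed successfully") or raises the task's translated fault. A minimal sketch of that loop in Python, with a hypothetical get_task_info callable standing in for the PropertyCollector round-trip that oslo.vmware actually performs:

```python
import time

POLL_INTERVAL = 0.5  # seconds between polls; the real loop is an eventlet-based looping call


class TaskFailed(Exception):
    """Raised when the vCenter task ends in an error state."""


def wait_for_task(task_ref, get_task_info):
    """Poll a task reference until it reaches 'success' or 'error'."""
    while True:
        info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise TaskFailed(info.get('error'))  # the library translates the fault first
        # 'queued' or 'running': report progress and try again, as in the
        # "Task: {...} progress is 0%." records above.
        print(f"Task {task_ref}: progress is {info.get('progress', 0)}%.")
        time.sleep(POLL_INTERVAL)


# Usage: a stand-in task source that succeeds on the third poll.
_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'running', 'progress': 60},
                {'state': 'success'}])
wait_for_task('task-4891789', lambda ref: next(_states))
```
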
[ 1978.869466] env[62346]: DEBUG nova.compute.claims [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1978.869737] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.870094] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.895924] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1979.073352] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1979.132299] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08077981-474d-4365-ad5d-f560673b4854 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.137673] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1979.137906] env[62346]: DEBUG oslo_vmware.rw_handles [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1979.142057] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884f736b-e46f-48e7-8c7a-71606396a96f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.172246] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ef5505-825e-4a67-b96a-3de1de3ad095 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.179871] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32b8b61-a643-4689-a005-56447656cbb1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.195017] env[62346]: DEBUG nova.compute.provider_tree [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.204363] env[62346]: DEBUG nova.scheduler.client.report [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1979.221028] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.351s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.221664] env[62346]: ERROR nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1979.221664] env[62346]: Faults: ['InvalidArgument'] [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Traceback (most recent call last): [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self.driver.spawn(context, instance, image_meta, [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1979.221664] env[62346]: ERROR 
nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self._fetch_image_if_missing(context, vi) [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] image_cache(vi, tmp_image_ds_loc) [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] vm_util.copy_virtual_disk( [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] session._wait_for_task(vmdk_copy_task) [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] return self.wait_for_task(task_ref) [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] return evt.wait() [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] result = hub.switch() [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] return self.greenlet.switch() [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] self.f(*self.args, **self.kw) [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] raise exceptions.translate_fault(task_info.error) [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Faults: ['InvalidArgument'] [ 1979.221664] env[62346]: ERROR nova.compute.manager [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] [ 1979.222467] env[62346]: DEBUG nova.compute.utils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1979.223968] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Build of instance daef9d9c-03a6-4ee8-9806-9d895f802776 was re-scheduled: A specified parameter was not correct: fileType [ 1979.223968] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1979.224355] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1979.224524] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1979.224694] env[62346]: DEBUG nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1979.224857] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1979.762318] env[62346]: DEBUG nova.network.neutron [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.775294] env[62346]: INFO nova.compute.manager [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Took 0.55 seconds to deallocate network for instance. 
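
The "Fault InvalidArgument not matched" record at 1978.211128 shows how oslo.vmware picks the exception class for a failed task: fault names with a dedicated class are mapped to it, and anything unrecognized falls back to the generic VimFaultException seen in both tracebacks above. An illustrative sketch of that lookup; the registry contents here are assumptions for the example, not the library's actual table:

```python
class VimFaultException(Exception):
    """Generic fallback carrying the raw VIM fault names."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


class FileNotFoundException(VimFaultException):
    """Example of a fault name with a dedicated class."""


# Hypothetical registry; 'InvalidArgument' is deliberately absent, which is
# why the log reports it as "not matched" and raises the generic class.
_FAULT_REGISTRY = {'FileNotFound': FileNotFoundException}


def translate_fault(fault_name, message):
    cls = _FAULT_REGISTRY.get(fault_name)
    if cls is None:
        print(f"Fault {fault_name} not matched.")  # the DEBUG record at 1978.211128
        return VimFaultException([fault_name], message)
    return cls([fault_name], message)


exc = translate_fault('InvalidArgument',
                      'A specified parameter was not correct: fileType')
assert exc.fault_list == ['InvalidArgument']
```
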
[ 1979.872505] env[62346]: INFO nova.scheduler.client.report [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Deleted allocations for instance daef9d9c-03a6-4ee8-9806-9d895f802776 [ 1979.921351] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7210a741-e998-4a41-b542-44d065ba25b9 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 583.206s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.922271] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 387.508s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.922533] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "daef9d9c-03a6-4ee8-9806-9d895f802776-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.923217] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.923641] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.926515] env[62346]: INFO nova.compute.manager [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Terminating instance [ 1979.928945] env[62346]: DEBUG nova.compute.manager [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Start destroying the instance on the hypervisor.
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1979.929331] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1979.929706] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-484bc586-69b6-40d6-ab31-cde3a604402e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.940162] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289852ce-0c3e-4da0-ab34-a05c31b13a8b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.976824] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance daef9d9c-03a6-4ee8-9806-9d895f802776 could not be found. [ 1979.977185] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1979.977743] env[62346]: INFO nova.compute.manager [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1979.978040] env[62346]: DEBUG oslo.service.loopingcall [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1979.978487] env[62346]: DEBUG nova.compute.manager [-] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1979.978695] env[62346]: DEBUG nova.network.neutron [-] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1980.006424] env[62346]: DEBUG nova.network.neutron [-] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.018786] env[62346]: INFO nova.compute.manager [-] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] Took 0.04 seconds to deallocate network for instance.
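
The lock records that dominate this trace ("Acquiring lock ... by ...", "acquired ... :: waited Ns", '"released" ... :: held Ns') are produced by oslo.concurrency's synchronized wrapper, which serializes callers on a named lock and logs how long each caller waited for and then held it. The <locals> components in the quoted owners are ordinary Python __qualname__ values for functions nested inside methods. A minimal sketch of that bookkeeping, using a plain threading.Lock rather than the real lockutils implementation:

```python
import functools
import threading
import time

_LOCKS = {}  # one shared lock object per lock name


def synchronized(name):
    """Decorator sketch: serialize callers on a named lock and log timings."""
    lock = _LOCKS.setdefault(name, threading.Lock())

    def wrap(f):
        # __qualname__ is what the records quote as the lock owner; for a
        # function nested inside a method it contains the '<locals>' part,
        # e.g. 'ComputeManager.terminate_instance.<locals>.do_terminate_instance'.
        target = f"{f.__module__}.{f.__qualname__}"

        @functools.wraps(f)
        def inner(*args, **kwargs):
            print(f'Acquiring lock "{name}" by "{target}"')
            start = time.monotonic()
            acquired_at = None
            try:
                with lock:
                    acquired_at = time.monotonic()
                    print(f'Lock "{name}" acquired by "{target}" '
                          f':: waited {acquired_at - start:.3f}s')
                    return f(*args, **kwargs)
            finally:
                # Logged after the lock is dropped, like the ":: held" records.
                if acquired_at is not None:
                    held = time.monotonic() - acquired_at
                    print(f'Lock "{name}" "released" by "{target}" :: held {held:.3f}s')

        return inner

    return wrap


@synchronized("compute_resources")
def update_available_resource():
    time.sleep(0.01)  # stand-in for the resource-tracker audit


update_available_resource()
```
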
[ 1980.156360] env[62346]: DEBUG oslo_concurrency.lockutils [None req-f803864b-062d-4465-a472-2c300759ee23 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.234s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.157542] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 235.468s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.157805] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: daef9d9c-03a6-4ee8-9806-9d895f802776] During sync_power_state the instance has a pending task (deleting). Skip. [ 1980.158015] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "daef9d9c-03a6-4ee8-9806-9d895f802776" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.408180] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "ca0f017f-3bca-401f-8e70-83a7a5061116" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.901265] env[62346]: DEBUG oslo_concurrency.lockutils [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.221221] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2014.233745] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.233982] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.234171] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.234351] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2014.235488] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3d07da-7176-499b-8841-cd38ea47eef9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.244445] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0535185b-5646-4091-a6cb-55575be062ee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.259014] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808000a4-7929-4b60-944e-0e33a999f932 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.266143] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f8f0cc-606d-472e-afd5-cff4a6b699a6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.297160] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180577MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2014.297347] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.297557] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.372131] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 5af6a907-80d7-4630-aa01-c600e4908d32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.372321] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.372485] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.372655] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.372827] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.373020] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.373202] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.373357] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.373517] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.373753] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2014.373947] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '83', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '7', 'num_os_type_None': '9', 'num_proj_c344afa6e71c4cc78e746bb53d7d4acc': '1', 'io_workload': '9', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '2', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'num_task_spawning': '2', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2014.493939] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24682608-59a8-42cf-8e1c-91a32f2e9173 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.501934] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8dbb98-6bc4-4633-8a36-596bbd0bb89b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.532018] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea50738b-2a19-43f2-aa3b-1136d6d9b46d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.540313] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e825a8-b6f5-49ae-8951-2a382487528f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.553802] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2014.563671] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2014.578397] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2014.578397] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.281s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.572616] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.572977] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.220413] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.220605] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2017.220701] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2017.240117] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.240323] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.240483] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.240609] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.240722] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.240841] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.240963] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.241097] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.241222] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.241366] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2018.219865] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.220303] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.220642] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2019.229756] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] There are 0 instances to clean {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2020.230092] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.220299] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.220540] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
2021.220692] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2025.221028] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.001541] env[62346]: WARNING oslo_vmware.rw_handles [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2026.001541] env[62346]: ERROR oslo_vmware.rw_handles [ 2026.001962] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2026.003824] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2026.004074] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Copying Virtual Disk [datastore2] vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/948fc492-64a8-4e07-a22c-d61ffdf83964/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 
2026.004363] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-449f377b-4286-4c31-bf3d-ff79155ba214 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.011877] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Waiting for the task: (returnval){ [ 2026.011877] env[62346]: value = "task-4891792" [ 2026.011877] env[62346]: _type = "Task" [ 2026.011877] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.020284] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Task: {'id': task-4891792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.522314] env[62346]: DEBUG oslo_vmware.exceptions [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2026.522668] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2026.523139] env[62346]: ERROR nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2026.523139] env[62346]: Faults: ['InvalidArgument'] [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Traceback (most recent call last): [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] yield resources [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self.driver.spawn(context, instance, image_meta, [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 
5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self._fetch_image_if_missing(context, vi) [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] image_cache(vi, tmp_image_ds_loc) [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] vm_util.copy_virtual_disk( [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] session._wait_for_task(vmdk_copy_task) [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] return self.wait_for_task(task_ref) [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] return evt.wait() [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] result = hub.switch() [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] return self.greenlet.switch() [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self.f(*self.args, **self.kw) [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] raise exceptions.translate_fault(task_info.error) [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Faults: 
['InvalidArgument'] [ 2026.523139] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] [ 2026.524444] env[62346]: INFO nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Terminating instance [ 2026.525079] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.525288] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2026.525531] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a98ec559-c5a9-403f-b051-eb8817849cbb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.527777] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2026.527974] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2026.528696] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43f157a-fd6e-4a5d-92bd-94e48638ba6a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.536079] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2026.536354] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d69ec6cc-3fac-4a62-9d6f-dfba1780304d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.538570] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2026.538743] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 
tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2026.539709] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee2ff068-8925-4f02-87f5-329b44708705 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.545120] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){ [ 2026.545120] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ccfaa3-89ed-44f3-08a1-87fc670188aa" [ 2026.545120] env[62346]: _type = "Task" [ 2026.545120] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.553113] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ccfaa3-89ed-44f3-08a1-87fc670188aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.618020] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2026.618330] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2026.618593] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Deleting the datastore file [datastore2] 5af6a907-80d7-4630-aa01-c600e4908d32 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2026.618924] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbed9fff-f166-40a2-8d19-6130535698a4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.625794] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Waiting for the task: (returnval){ [ 2026.625794] env[62346]: value = "task-4891794" [ 2026.625794] env[62346]: _type = "Task" [ 2026.625794] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.634113] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Task: {'id': task-4891794, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.056165] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2027.056448] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating directory with path [datastore2] vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2027.056693] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c95c1d0-b09d-408b-8b71-43db6ec17dbf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.068327] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Created directory with path [datastore2] vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2027.068512] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Fetch image to [datastore2] vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2027.068681] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2027.069416] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a4c558-90e5-48ce-b77a-16ac814ff8e1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.076053] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b6cac6-9a80-4106-b01c-25cc06e6e799 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.085206] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95db5dda-4fb8-423f-89e6-e7d14b158e22 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.116182] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e676e35a-e260-4140-9f08-4907d291cad8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.121979] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d8bb1718-6bf8-4ca9-849d-a25b019952a3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.134358] env[62346]: DEBUG oslo_vmware.api [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Task: {'id': task-4891794, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064207} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.134615] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2027.134799] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2027.134966] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2027.135163] env[62346]: INFO nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Took 0.61 seconds to destroy the instance on the hypervisor. 
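Annotation: the DeleteDatastoreFile_Task sequence above — "Invoking FileManager.DeleteDatastoreFile_Task", "Waiting for the task: (returnval){ value = "task-4891794" ... }", the "progress is 0%" poll, then "completed successfully" — is oslo.vmware's invoke-then-poll pattern. A rough sketch follows; the host, credentials, retry/poll values, and datastore path are assumed placeholders, not values from this deployment.

from oslo_vmware import api as vmware_api

# Establish a vCenter session (logs the SessionManager.Login seen earlier).
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'svc-user', 'secret',
    10,    # api_retry_count
    0.5)   # task_poll_interval: seconds between "progress is N%" polls

file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] 5af6a907-80d7-4630-aa01-c600e4908d32',
    datacenter=None)  # a real caller passes the datacenter managed object ref
session.wait_for_task(task)  # blocks, re-polling until success or a task fault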
[ 2027.137280] env[62346]: DEBUG nova.compute.claims [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2027.137474] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.137689] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.147268] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2027.249693] env[62346]: DEBUG oslo_vmware.rw_handles [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2027.312025] env[62346]: DEBUG oslo_vmware.rw_handles [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2027.312255] env[62346]: DEBUG oslo_vmware.rw_handles [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2027.355388] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d88287b-903f-4bf9-a056-62013d7d098f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.363571] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1a0a80-e997-4267-8dc5-096fe282e293 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.392902] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25ccb26-aafc-41a9-9c3f-0d1f6ad47008 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.400800] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a32da60-da95-4b6a-b49c-e83f1a8bd3cf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.415622] env[62346]: DEBUG nova.compute.provider_tree [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.425403] env[62346]: DEBUG nova.scheduler.client.report [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2027.442584] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.443138] env[62346]: ERROR nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2027.443138] env[62346]: Faults: ['InvalidArgument'] [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Traceback (most recent call last): [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2027.443138] 
env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self.driver.spawn(context, instance, image_meta, [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self._fetch_image_if_missing(context, vi) [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] image_cache(vi, tmp_image_ds_loc) [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] vm_util.copy_virtual_disk( [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] session._wait_for_task(vmdk_copy_task) [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] return self.wait_for_task(task_ref) [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] return evt.wait() [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] result = hub.switch() [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] return self.greenlet.switch() [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] self.f(*self.args, **self.kw) [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] raise exceptions.translate_fault(task_info.error) [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Faults: ['InvalidArgument'] [ 2027.443138] env[62346]: ERROR nova.compute.manager [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] [ 2027.443943] env[62346]: DEBUG nova.compute.utils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2027.445392] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Build of instance 5af6a907-80d7-4630-aa01-c600e4908d32 was re-scheduled: A specified parameter was not correct: fileType [ 2027.445392] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2027.445761] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2027.445931] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2027.446116] env[62346]: DEBUG nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2027.446281] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2028.078964] env[62346]: DEBUG nova.network.neutron [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.095199] env[62346]: INFO nova.compute.manager [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Took 0.65 seconds to deallocate network for instance. [ 2028.193627] env[62346]: INFO nova.scheduler.client.report [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Deleted allocations for instance 5af6a907-80d7-4630-aa01-c600e4908d32 [ 2028.215960] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e42390cd-6dc5-455a-a8b8-5d22d8513bb5 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "5af6a907-80d7-4630-aa01-c600e4908d32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 628.195s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.216389] env[62346]: DEBUG oslo_concurrency.lockutils [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "5af6a907-80d7-4630-aa01-c600e4908d32" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 432.919s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.216620] env[62346]: DEBUG oslo_concurrency.lockutils [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Acquiring lock "5af6a907-80d7-4630-aa01-c600e4908d32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.216826] env[62346]: DEBUG oslo_concurrency.lockutils [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "5af6a907-80d7-4630-aa01-c600e4908d32-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.216994] env[62346]: DEBUG oslo_concurrency.lockutils [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "5af6a907-80d7-4630-aa01-c600e4908d32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.219485] env[62346]: INFO nova.compute.manager [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Terminating instance [ 2028.221417] env[62346]: DEBUG nova.compute.manager [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2028.221597] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2028.222107] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9772820-7d96-4101-9ece-15639f3bacdd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.227477] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2028.227637] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances with incomplete migration {{(pid=62346) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 2028.233500] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3760979-36bc-4036-8f04-f5cb4a02dbc8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.267796] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5af6a907-80d7-4630-aa01-c600e4908d32 could not be found. 
[ 2028.268031] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2028.268232] env[62346]: INFO nova.compute.manager [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2028.268481] env[62346]: DEBUG oslo.service.loopingcall [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.268729] env[62346]: DEBUG nova.compute.manager [-] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2028.268825] env[62346]: DEBUG nova.network.neutron [-] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2028.294605] env[62346]: DEBUG nova.network.neutron [-] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.303901] env[62346]: INFO nova.compute.manager [-] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] Took 0.03 seconds to deallocate network for instance. [ 2028.394061] env[62346]: DEBUG oslo_concurrency.lockutils [None req-99dad1a1-4ced-47b8-b37b-a0817237bf14 tempest-ServerDiskConfigTestJSON-602794259 tempest-ServerDiskConfigTestJSON-602794259-project-member] Lock "5af6a907-80d7-4630-aa01-c600e4908d32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.394994] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "5af6a907-80d7-4630-aa01-c600e4908d32" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 283.706s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.395205] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 5af6a907-80d7-4630-aa01-c600e4908d32] During sync_power_state the instance has a pending task (deleting). Skip. 
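
The "pending task ... Skip" record above comes from a guard in the periodic power-state sync: when an instance still has a task in flight (here 'deleting'), the sync defers to the owning operation instead of reconciling power state. A hedged, self-contained sketch of that guard; the class and function names are illustrative, not Nova's exact code:

    class Instance:
        def __init__(self, uuid, task_state):
            self.uuid = uuid
            self.task_state = task_state

    def query_driver_power_state_and_sync(instance):
        # A non-None task_state means another code path owns this
        # instance right now, so the periodic sync skips it.
        if instance.task_state is not None:
            print('During sync_power_state the instance has a pending '
                  'task (%s). Skip.' % instance.task_state)
            return
        # Otherwise the driver's power state would be compared with the
        # database record here and any mismatch corrected.

    query_driver_power_state_and_sync(
        Instance('5af6a907-80d7-4630-aa01-c600e4908d32', 'deleting'))
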
[ 2028.395374] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "5af6a907-80d7-4630-aa01-c600e4908d32" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.238126] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2040.810202] env[62346]: DEBUG oslo_concurrency.lockutils [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.221078] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.232261] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.232479] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.232644] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.232854] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2075.234321] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc95584b-702a-429a-95b7-e1a0a51b13ba {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.243084] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07207ccb-5226-4adf-83e2-32c53fae5a3a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.257967] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3856ccb-2c78-4489-a6c3-aca88e3dabe5 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.264313] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3190c9-e063-4e84-997b-1b00db564075 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.295055] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180577MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2075.295055] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.295055] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.446749] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.446940] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.447120] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.447256] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.447380] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.447501] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.447618] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.447733] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2075.447927] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2075.448090] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] stats={'failed_builds': '84', 'num_instances': '8', 'num_vm_building': '8', 'num_task_deleting': '7', 'num_os_type_None': '8', 'num_proj_5f45e49e839f4cafaea598ac8f5fbd2f': '1', 'io_workload': '8', 'num_proj_20adb521b1574b8581a0c368923e38eb': '2', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1', 'num_task_spawning': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2075.465762] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing inventories for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2075.481601] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating ProviderTree inventory for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2075.481845] env[62346]: DEBUG 
nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating inventory in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2075.493630] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing aggregate associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, aggregates: None {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2075.512631] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing trait associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2075.619089] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a95001-de30-40ac-bcfb-9ab673265220 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.626828] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d6edef-dd74-44f1-b42e-0128dce9d5ab {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.656266] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e98940c-2806-4c3b-9874-41623319d4d6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.664054] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39a63e6-2577-4204-9b92-59bd41a177c7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.676904] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2075.685857] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2075.703024] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service 
record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2075.703213] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.408s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.698524] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.898041] env[62346]: WARNING oslo_vmware.rw_handles [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2076.898041] env[62346]: ERROR oslo_vmware.rw_handles [ 2076.898041] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2076.899556] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2076.899867] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/52bb2285-bf99-4d49-9794-7dbec8f9f36a/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2076.900209] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41e30cc8-22a3-4e51-adf5-dc0c3334af9d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.907902] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){ [ 2076.907902] env[62346]: value = "task-4891795" [ 2076.907902] env[62346]: _type = "Task" [ 2076.907902] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.917205] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': task-4891795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.418148] env[62346]: DEBUG oslo_vmware.exceptions [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2077.418444] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.419010] env[62346]: ERROR nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.419010] env[62346]: Faults: ['InvalidArgument'] [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Traceback (most recent call last): [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] yield resources [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self.driver.spawn(context, instance, image_meta, [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self._fetch_image_if_missing(context, vi) [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] image_cache(vi, tmp_image_ds_loc) [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] vm_util.copy_virtual_disk( [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] session._wait_for_task(vmdk_copy_task) [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] return self.wait_for_task(task_ref) [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] return evt.wait() [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] result = hub.switch() [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] return self.greenlet.switch() [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self.f(*self.args, **self.kw) [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] raise exceptions.translate_fault(task_info.error) [ 
2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Faults: ['InvalidArgument'] [ 2077.419010] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] [ 2077.419777] env[62346]: INFO nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Terminating instance [ 2077.420933] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.421159] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.421398] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7fbd9e86-277a-4666-a735-a8130367b662 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.423633] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2077.423836] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2077.424572] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3be6389-63c4-4188-ae95-dcf5387a0b51 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.431691] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2077.431936] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41edc209-0bb6-4c97-825e-8b999a348171 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.434227] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.434398] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2077.435354] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31b02d3f-204a-434a-a5e3-6f648c515aad {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.440259] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 2077.440259] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f07f01-bc79-1230-25a5-0090b84e938f" [ 2077.440259] env[62346]: _type = "Task" [ 2077.440259] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.448078] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52f07f01-bc79-1230-25a5-0090b84e938f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.500719] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2077.500997] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2077.501135] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Deleting the datastore file [datastore2] 87c6dc89-e89b-4c72-b29c-16751a749d29 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2077.501415] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a1b5cd4-6e27-40cb-be07-bf32836d9315 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.508328] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for the task: (returnval){ [ 2077.508328] env[62346]: value = "task-4891797" [ 2077.508328] env[62346]: _type = "Task" [ 2077.508328] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.516674] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': task-4891797, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.950531] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2077.950959] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating directory with path [datastore2] vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.951035] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-771471ea-2102-4a6b-b8b6-2613baa8fb91 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.962766] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created directory with path [datastore2] vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.962990] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Fetch image to [datastore2] vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2077.963141] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2077.963893] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d6ea43-cdf4-4cff-bb45-71bee84c6cc2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.970919] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd36953e-518d-460d-8334-10eed92a2efd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.980100] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e2343c-b328-4045-8301-d61514a0be11 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.014211] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-67cf03ca-de90-4aa7-8d97-9bf7001571f4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.021721] env[62346]: DEBUG oslo_vmware.api [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Task: {'id': task-4891797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080023} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.023809] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2078.024166] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2078.024491] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2078.024767] env[62346]: INFO nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Took 0.60 seconds to destroy the instance on the hypervisor. 
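
The DeleteDatastoreFile_Task sequence above (invoke, "progress is 0%", "completed successfully") follows oslo.vmware's invoke-then-poll pattern: the SOAP call returns a task reference, and wait_for_task() polls it until success or a translated fault. A minimal sketch under assumed placeholder host, credentials, and datastore path; the real values come from nova.conf and the datastore browser:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; creating the session logs in
    # to vCenter much like the session-creation records in this log.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    vim = session.vim
    file_manager = vim.service_content.fileManager
    # Starts an asynchronous vCenter task and returns its reference.
    task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] 87c6dc89-e89b-4c72-b29c-16751a749d29',
        datacenter=None)  # a datacenter ref in real code; None is a stub
    # Polls task.info (producing the "progress is N%" debug lines) and
    # raises a translated exception, e.g. VimFaultException, on error.
    session.wait_for_task(task)
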
[ 2078.026636] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-195fc62c-a320-45e4-a03f-d4ba29b07d57 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.028602] env[62346]: DEBUG nova.compute.claims [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2078.028778] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.028990] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.053981] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2078.128087] env[62346]: DEBUG oslo_vmware.rw_handles [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2078.192984] env[62346]: DEBUG oslo_vmware.rw_handles [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2078.193218] env[62346]: DEBUG oslo_vmware.rw_handles [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2078.219695] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.219863] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2078.219986] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2078.237025] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2078.237192] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2078.237336] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2078.237448] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2078.237569] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2078.237688] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2078.237806] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2078.237976] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2078.239284] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.240121] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c922de8-a7c2-4e4f-905e-366be4e8eb81 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.248895] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5003ad0-3104-4df3-b173-528866bbfe4d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.278551] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a0684f-966e-4f95-9f64-ea6eb8badba8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.285663] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f25480-4833-4cac-97fd-9a285ccb57dc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.298876] env[62346]: DEBUG nova.compute.provider_tree [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2078.308536] env[62346]: DEBUG nova.scheduler.client.report [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2078.323522] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.294s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.324089] env[62346]: ERROR nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2078.324089] env[62346]: Faults: ['InvalidArgument'] [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 
87c6dc89-e89b-4c72-b29c-16751a749d29] Traceback (most recent call last): [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self.driver.spawn(context, instance, image_meta, [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self._fetch_image_if_missing(context, vi) [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] image_cache(vi, tmp_image_ds_loc) [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] vm_util.copy_virtual_disk( [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] session._wait_for_task(vmdk_copy_task) [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] return self.wait_for_task(task_ref) [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] return evt.wait() [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] result = hub.switch() [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] return self.greenlet.switch() [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] self.f(*self.args, **self.kw) [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] raise exceptions.translate_fault(task_info.error) [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Faults: ['InvalidArgument'] [ 2078.324089] env[62346]: ERROR nova.compute.manager [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] [ 2078.324971] env[62346]: DEBUG nova.compute.utils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2078.326259] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Build of instance 87c6dc89-e89b-4c72-b29c-16751a749d29 was re-scheduled: A specified parameter was not correct: fileType [ 2078.326259] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2078.326642] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2078.326814] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2078.326987] env[62346]: DEBUG nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2078.327171] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2078.709973] env[62346]: DEBUG nova.network.neutron [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.722197] env[62346]: INFO nova.compute.manager [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Took 0.39 seconds to deallocate network for instance. [ 2078.818431] env[62346]: INFO nova.scheduler.client.report [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Deleted allocations for instance 87c6dc89-e89b-4c72-b29c-16751a749d29 [ 2078.864230] env[62346]: DEBUG oslo_concurrency.lockutils [None req-224f483d-5bc7-4fdd-a2ab-f211f541b244 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.494s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.864493] env[62346]: DEBUG oslo_concurrency.lockutils [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.047s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.864701] env[62346]: DEBUG oslo_concurrency.lockutils [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Acquiring lock "87c6dc89-e89b-4c72-b29c-16751a749d29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.864901] env[62346]: DEBUG oslo_concurrency.lockutils [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.865085] env[62346]: DEBUG oslo_concurrency.lockutils [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.867253] env[62346]: INFO nova.compute.manager [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Terminating instance [ 2078.869025] env[62346]: DEBUG nova.compute.manager [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2078.869218] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2078.869760] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e26e8f2d-ed28-4972-b57a-25fad6312ff2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.879603] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b4e29d-d37b-45fb-99d7-934d3635de0a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.910691] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87c6dc89-e89b-4c72-b29c-16751a749d29 could not be found. [ 2078.910963] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2078.911174] env[62346]: INFO nova.compute.manager [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2078.911474] env[62346]: DEBUG oslo.service.loopingcall [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2078.911756] env[62346]: DEBUG nova.compute.manager [-] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2078.911882] env[62346]: DEBUG nova.network.neutron [-] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2078.945144] env[62346]: DEBUG nova.network.neutron [-] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.953963] env[62346]: INFO nova.compute.manager [-] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] Took 0.04 seconds to deallocate network for instance. [ 2079.057437] env[62346]: DEBUG oslo_concurrency.lockutils [None req-61c03576-606a-4731-8321-e2f50b802a32 tempest-SecurityGroupsTestJSON-252965053 tempest-SecurityGroupsTestJSON-252965053-project-member] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.058314] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 334.369s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.058516] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 87c6dc89-e89b-4c72-b29c-16751a749d29] During sync_power_state the instance has a pending task (deleting). Skip. 
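The waited/held timings in the records above come from oslo.concurrency's lockutils: build_and_run_instance, do_terminate_instance, and the power-state sync all serialize on a lock named after the instance UUID, which is why the terminate path waited 437.047s for the build lock to be released. A minimal sketch of that locking pattern, assuming oslo.concurrency is installed; the functions below are illustrative placeholders, not Nova's actual code:

    from oslo_concurrency import lockutils

    # The "Acquiring lock ... / acquired ... waited Ns / released ...
    # held Ns" lines in the log are emitted by lockutils' "inner"
    # wrapper around the decorated function.
    INSTANCE_UUID = '87c6dc89-e89b-4c72-b29c-16751a749d29'  # UUID from the log

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Critical section: runs only once the same-named lock held by
        # the build path has been released.
        print('terminating %s' % INSTANCE_UUID)

    do_terminate_instance()

    # Equivalent context-manager form, as used for the refresh_cache
    # locks later in the log ("Acquiring lock ... / Acquired lock ... /
    # Releasing lock", lockutils.py:310/313/331):
    with lockutils.lock(INSTANCE_UUID):
        print('power-state sync for %s' % INSTANCE_UUID)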
[ 2079.058698] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "87c6dc89-e89b-4c72-b29c-16751a749d29" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.220176] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.220951] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.304055] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "567d2348-be32-4158-a5e0-0a724ca81299" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.304231] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "567d2348-be32-4158-a5e0-0a724ca81299" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.315826] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Starting instance... 
{{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2080.365045] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.365343] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.366751] env[62346]: INFO nova.compute.claims [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2080.520333] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf2a723-3b9d-46a5-b33a-e4b16ef1cd24 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.528363] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde854e4-8ed8-4034-bc12-87cd5ec3ae3a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.560290] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86b72d3-caec-4c05-b3dc-4fe8999c91e4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.568485] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f03fb08-9847-4905-9bf1-1e35ddfc9879 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.582131] env[62346]: DEBUG nova.compute.provider_tree [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2080.590581] env[62346]: DEBUG nova.scheduler.client.report [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2080.603969] env[62346]: DEBUG oslo_concurrency.lockutils [None 
req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.239s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.604456] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2080.638628] env[62346]: DEBUG nova.compute.utils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2080.640179] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2080.640355] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2080.649923] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2080.698884] env[62346]: DEBUG nova.policy [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c601083f0a44da850b33189c701bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abc1ead3f9a9442ca0b85f152f94fe6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 2080.714273] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2080.740644] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2080.740904] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2080.741075] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2080.741261] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2080.741410] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2080.741559] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2080.741769] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2080.741931] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2080.742114] env[62346]: DEBUG nova.virt.hardware [None 
req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2080.742279] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2080.742452] env[62346]: DEBUG nova.virt.hardware [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2080.743376] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b6df5a-414e-40ce-85b2-864dae8d2064 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.752805] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba51e07-3198-45bf-9a4e-d5e89aacac57 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.139038] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Successfully created port: b3ab8c12-3acc-4750-9c8a-3bb20d430ee4 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2081.790708] env[62346]: DEBUG nova.compute.manager [req-a36e9272-e44f-45b9-a316-eb77759fb272 req-29d4ef4c-d603-4d49-bc48-4921533e707e service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Received event network-vif-plugged-b3ab8c12-3acc-4750-9c8a-3bb20d430ee4 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2081.791024] env[62346]: DEBUG oslo_concurrency.lockutils [req-a36e9272-e44f-45b9-a316-eb77759fb272 req-29d4ef4c-d603-4d49-bc48-4921533e707e service nova] Acquiring lock "567d2348-be32-4158-a5e0-0a724ca81299-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.791169] env[62346]: DEBUG oslo_concurrency.lockutils [req-a36e9272-e44f-45b9-a316-eb77759fb272 req-29d4ef4c-d603-4d49-bc48-4921533e707e service nova] Lock "567d2348-be32-4158-a5e0-0a724ca81299-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.791337] env[62346]: DEBUG oslo_concurrency.lockutils [req-a36e9272-e44f-45b9-a316-eb77759fb272 req-29d4ef4c-d603-4d49-bc48-4921533e707e service nova] Lock "567d2348-be32-4158-a5e0-0a724ca81299-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.791499] env[62346]: DEBUG nova.compute.manager 
[req-a36e9272-e44f-45b9-a316-eb77759fb272 req-29d4ef4c-d603-4d49-bc48-4921533e707e service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] No waiting events found dispatching network-vif-plugged-b3ab8c12-3acc-4750-9c8a-3bb20d430ee4 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2081.791659] env[62346]: WARNING nova.compute.manager [req-a36e9272-e44f-45b9-a316-eb77759fb272 req-29d4ef4c-d603-4d49-bc48-4921533e707e service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Received unexpected event network-vif-plugged-b3ab8c12-3acc-4750-9c8a-3bb20d430ee4 for instance with vm_state building and task_state spawning. [ 2081.875305] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Successfully updated port: b3ab8c12-3acc-4750-9c8a-3bb20d430ee4 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2081.890106] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "refresh_cache-567d2348-be32-4158-a5e0-0a724ca81299" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.890258] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "refresh_cache-567d2348-be32-4158-a5e0-0a724ca81299" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.890537] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2081.936539] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2082.157772] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Updating instance_info_cache with network_info: [{"id": "b3ab8c12-3acc-4750-9c8a-3bb20d430ee4", "address": "fa:16:3e:e3:f1:ee", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3ab8c12-3a", "ovs_interfaceid": "b3ab8c12-3acc-4750-9c8a-3bb20d430ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2082.173436] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "refresh_cache-567d2348-be32-4158-a5e0-0a724ca81299" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.173782] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Instance network_info: |[{"id": "b3ab8c12-3acc-4750-9c8a-3bb20d430ee4", "address": "fa:16:3e:e3:f1:ee", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3ab8c12-3a", "ovs_interfaceid": "b3ab8c12-3acc-4750-9c8a-3bb20d430ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2082.174531] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:f1:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3ab8c12-3acc-4750-9c8a-3bb20d430ee4', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2082.182542] env[62346]: DEBUG oslo.service.loopingcall [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.183230] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2082.183230] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4f2490d-9081-4fda-9683-0a669c3b0f49 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.203207] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2082.203207] env[62346]: value = "task-4891798" [ 2082.203207] env[62346]: _type = "Task" [ 2082.203207] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.212193] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891798, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.219822] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.377243] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.714445] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891798, 'name': CreateVM_Task, 'duration_secs': 0.29736} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.714637] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2082.715400] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.715575] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.715915] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2082.716210] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f54a4af-bc96-407c-af2e-60dd54fb3559 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.722013] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 2082.722013] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ff66dc-c855-7584-c6e7-ae66f4ebf8c2" [ 2082.722013] env[62346]: _type = "Task" [ 2082.722013] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.731824] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52ff66dc-c855-7584-c6e7-ae66f4ebf8c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.220113] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.220499] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2083.233029] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.233320] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2083.233541] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.821107] env[62346]: DEBUG nova.compute.manager [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Received event network-changed-b3ab8c12-3acc-4750-9c8a-3bb20d430ee4 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2083.821320] env[62346]: DEBUG nova.compute.manager [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Refreshing instance network info cache due to event network-changed-b3ab8c12-3acc-4750-9c8a-3bb20d430ee4. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2083.821589] env[62346]: DEBUG oslo_concurrency.lockutils [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] Acquiring lock "refresh_cache-567d2348-be32-4158-a5e0-0a724ca81299" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.821635] env[62346]: DEBUG oslo_concurrency.lockutils [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] Acquired lock "refresh_cache-567d2348-be32-4158-a5e0-0a724ca81299" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.821790] env[62346]: DEBUG nova.network.neutron [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Refreshing network info cache for port b3ab8c12-3acc-4750-9c8a-3bb20d430ee4 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2084.138319] env[62346]: DEBUG nova.network.neutron [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Updated VIF entry in instance network info cache for port b3ab8c12-3acc-4750-9c8a-3bb20d430ee4. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2084.138673] env[62346]: DEBUG nova.network.neutron [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Updating instance_info_cache with network_info: [{"id": "b3ab8c12-3acc-4750-9c8a-3bb20d430ee4", "address": "fa:16:3e:e3:f1:ee", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3ab8c12-3a", "ovs_interfaceid": "b3ab8c12-3acc-4750-9c8a-3bb20d430ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2084.148187] env[62346]: DEBUG oslo_concurrency.lockutils [req-dcd09f02-9222-41fe-8ade-d2cce26d077c req-54c5c1e7-adcd-4aaf-b2f6-384eacef4277 service nova] Releasing lock "refresh_cache-567d2348-be32-4158-a5e0-0a724ca81299" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.216017] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2092.238283] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.735857] env[62346]: WARNING oslo_vmware.rw_handles [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2127.735857] env[62346]: ERROR 
oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2127.735857] env[62346]: ERROR oslo_vmware.rw_handles [ 2127.736626] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2127.738158] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2127.738411] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Copying Virtual Disk [datastore2] vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/0dc4aa08-fd07-4d71-88b9-09e57ea04da2/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2127.738680] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e11515a0-3067-4b1b-ae4c-caa80f6c9e82 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.747419] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 2127.747419] env[62346]: value = "task-4891799" [ 2127.747419] env[62346]: _type = "Task" [ 2127.747419] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.755757] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.258338] env[62346]: DEBUG oslo_vmware.exceptions [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2128.258653] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.259241] env[62346]: ERROR nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2128.259241] env[62346]: Faults: ['InvalidArgument'] [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Traceback (most recent call last): [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] yield resources [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self.driver.spawn(context, instance, image_meta, [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self._fetch_image_if_missing(context, vi) [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] image_cache(vi, tmp_image_ds_loc) [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] vm_util.copy_virtual_disk( [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] session._wait_for_task(vmdk_copy_task) [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] return self.wait_for_task(task_ref) [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] return evt.wait() [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] result = hub.switch() [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] return self.greenlet.switch() [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self.f(*self.args, **self.kw) [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] raise exceptions.translate_fault(task_info.error) [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Faults: ['InvalidArgument'] [ 2128.259241] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] [ 2128.260087] env[62346]: INFO nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Terminating instance [ 2128.261220] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2128.261427] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2128.261684] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b49b9dcc-5242-4f4e-83f8-7a1a84a7cb4d 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.264248] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 2128.264447] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2128.265201] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca409c2d-e12c-42d1-9c0c-6ee13091399c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.272561] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2128.272779] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a20063da-7764-4a4a-920f-41f2874c77a2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.275222] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2128.275390] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2128.276420] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c4f586e-f2c3-467c-9c5a-744e7cc58fad {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.281706] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){
[ 2128.281706] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5298eb9f-576d-fae8-3d68-f17b36c367a5"
[ 2128.281706] env[62346]: _type = "Task"
[ 2128.281706] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2128.289396] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5298eb9f-576d-fae8-3d68-f17b36c367a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2128.352457] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2128.352680] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2128.352861] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleting the datastore file [datastore2] 21a988a5-43cc-44f8-97f4-01c5442b6303 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2128.353150] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8bd28de-9d14-40af-b70c-ea5ffe421c59 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.360130] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){
[ 2128.360130] env[62346]: value = "task-4891801"
[ 2128.360130] env[62346]: _type = "Task"
[ 2128.360130] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2128.368400] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891801, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2128.796065] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2128.796463] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2128.796747] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-193701a5-5e5b-416f-bcd3-1244f84b6803 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.810679] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2128.810959] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Fetch image to [datastore2] vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2128.811238] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2128.812341] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dcbf7e-9b0a-4191-9539-adc23945833c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.822011] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c563367-3e97-4f95-9f27-3aa291e680dc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.835254] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac603e9c-3cf7-4e0d-8e12-e85ae6bdea2d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.888100] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdaa1b7-17d2-4208-adb1-2deb2f61d40f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2128.899825] env[62346]: DEBUG oslo_vmware.api [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891801, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081095} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2128.902236] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2128.902521] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2128.902766] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2128.903024] env[62346]: INFO nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Took 0.64 seconds to destroy the instance on the hypervisor.
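The SearchDatastore_Task and DeleteDatastoreFile_Task sequences above show oslo.vmware's poll-until-done pattern: wait_for_task blocks the caller while _poll_task periodically re-reads the task object and logs "progress is N%" until the task reports success or error. Below is a minimal, self-contained sketch of that loop; the TaskInfo shape and the poll_task_info stub are illustrative assumptions for this log, not oslo.vmware's actual internals.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str               # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    _polls = {"n": 0}

    def poll_task_info(task_ref: str) -> TaskInfo:
        # Toy stand-in for reading TaskInfo through the PropertyCollector:
        # reports 0% once, then success, like task-4891801 above.
        _polls["n"] += 1
        if _polls["n"] == 1:
            return TaskInfo(state="running", progress=0)
        return TaskInfo(state="success", progress=100)

    def wait_for_task(task_ref: str, interval: float = 0.5) -> TaskInfo:
        # Block until the vCenter task completes, logging each poll.
        while True:
            info = poll_task_info(task_ref)
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(info.error or "task failed")
            print(f"Task {task_ref} progress is {info.progress}%.")
            time.sleep(interval)

    print(wait_for_task("task-4891801").state)  # -> success

The same loop explains the duration_secs field reported on completion: it is simply the elapsed time between the first poll and the terminal state.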
[ 2128.905451] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-498fc91b-907e-40b5-849e-26823989f6a1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.907691] env[62346]: DEBUG nova.compute.claims [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2128.907835] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.908030] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.932537] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2128.991039] env[62346]: DEBUG oslo_vmware.rw_handles [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2129.052030] env[62346]: DEBUG oslo_vmware.rw_handles [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2129.052188] env[62346]: DEBUG oslo_vmware.rw_handles [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2129.128060] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9741dc99-2376-4cf8-ad9e-cf8bb07ada7d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2129.136589] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2922ea3-6ac6-4767-a4a0-f6ddf6ed2fe7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2129.167630] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1ea34c-c85b-400a-91a9-05f79aa73603 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2129.175610] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bca154a-58cb-41b0-87f5-a5ca565eef97 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2129.189999] env[62346]: DEBUG nova.compute.provider_tree [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2129.199401] env[62346]: DEBUG nova.scheduler.client.report [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 2129.219385] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.311s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2129.219736] env[62346]: ERROR nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2129.219736] env[62346]: Faults: ['InvalidArgument']
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Traceback (most recent call last):
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self.driver.spawn(context, instance, image_meta,
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self._fetch_image_if_missing(context, vi)
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] image_cache(vi, tmp_image_ds_loc)
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] vm_util.copy_virtual_disk(
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] session._wait_for_task(vmdk_copy_task)
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] return self.wait_for_task(task_ref)
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] return evt.wait()
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] result = hub.switch()
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] return self.greenlet.switch()
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] self.f(*self.args, **self.kw)
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] raise exceptions.translate_fault(task_info.error)
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Faults: ['InvalidArgument']
[ 2129.219736] env[62346]: ERROR nova.compute.manager [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303]
[ 2129.220573] env[62346]: DEBUG nova.compute.utils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2129.222392] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Build of instance 21a988a5-43cc-44f8-97f4-01c5442b6303 was re-scheduled: A specified parameter was not correct: fileType
[ 2129.222392] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 2129.222777] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 2129.222947] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2129.223133] env[62346]: DEBUG nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2129.223333] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2129.625535] env[62346]: DEBUG nova.network.neutron [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.637577] env[62346]: INFO nova.compute.manager [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Took 0.41 seconds to deallocate network for instance. [ 2129.739615] env[62346]: INFO nova.scheduler.client.report [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleted allocations for instance 21a988a5-43cc-44f8-97f4-01c5442b6303 [ 2129.773024] env[62346]: DEBUG oslo_concurrency.lockutils [None req-613f47fc-b3bf-4c6f-b782-f51e439f21da tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.545s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.773024] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 440.291s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.773024] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "21a988a5-43cc-44f8-97f4-01c5442b6303-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.773024] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.773024] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.776358] env[62346]: INFO nova.compute.manager [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Terminating instance [ 2129.780146] env[62346]: DEBUG nova.compute.manager [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2129.780346] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2129.780620] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02d50c33-39fb-423d-b7dd-ec666b648b04 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.790590] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9026579b-9161-41c2-b520-0ff00181b412 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.822272] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 21a988a5-43cc-44f8-97f4-01c5442b6303 could not be found. [ 2129.822692] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2129.822692] env[62346]: INFO nova.compute.manager [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2129.822902] env[62346]: DEBUG oslo.service.loopingcall [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2129.823155] env[62346]: DEBUG nova.compute.manager [-] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2129.823376] env[62346]: DEBUG nova.network.neutron [-] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2129.852420] env[62346]: DEBUG nova.network.neutron [-] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.861076] env[62346]: INFO nova.compute.manager [-] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] Took 0.04 seconds to deallocate network for instance. [ 2129.990726] env[62346]: DEBUG oslo_concurrency.lockutils [None req-7c71cadd-e423-430a-8b1a-13bda574e00d tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.219s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.991758] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 385.302s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.991979] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 21a988a5-43cc-44f8-97f4-01c5442b6303] During sync_power_state the instance has a pending task (deleting). Skip. 
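Each oslo_concurrency.lockutils entry above comes in a pair: how long the caller waited to acquire the named lock ("acquired ... :: waited 440.291s") and, later, how long it was held ("released ... :: held 635.545s"), which is what makes lock contention readable straight off this log. Below is a toy context manager reproducing that accounting; the helper name and output format are illustrative approximations, not oslo.concurrency's real implementation.

    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}

    @contextmanager
    def timed_lock(name: str, owner: str):
        # Log wait and hold durations around a named lock, in the
        # spirit of the lockutils "waited"/"held" lines above.
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{owner}" :: '
              f'waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{owner}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

    # Serializing work on one instance UUID, as do_terminate_instance does:
    with timed_lock("21a988a5-43cc-44f8-97f4-01c5442b6303",
                    "do_terminate_instance"):
        time.sleep(0.01)

The long "waited" values above are normal here: terminate_instance and the power-state sync both queue behind the instance lock until the failed build finally releases it.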
[ 2129.992221] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "21a988a5-43cc-44f8-97f4-01c5442b6303" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.220301] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.231406] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.231647] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.231841] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.232018] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2135.233143] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dba37a8-69ec-489f-ab5b-b426ce8830be {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.242049] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d3dafe-ba6a-486b-9b16-80248987a84f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.257068] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde18781-f9ad-400e-9146-67cd04ae5270 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.264166] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384247c5-36c1-4ff5-a0f6-7bb5808d7695 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.295021] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180584MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2135.295193] env[62346]: DEBUG 
oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.295556] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.365075] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 052de992-f28b-4c25-bfbe-3517665f1902 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.365243] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.365371] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.365494] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.365612] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.365727] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.365840] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 567d2348-be32-4158-a5e0-0a724ca81299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.366036] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2135.366192] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '86', 'num_instances': '7', 'num_vm_building': '7', 'num_task_deleting': '6', 'num_os_type_None': '7', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '2', 'io_workload': '7', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_task_spawning': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2135.469803] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803e5db5-61a6-4fb8-97ad-c3688d921736 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.478036] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5faf376c-b8ff-40b6-8ad4-0e2404503bae {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.510516] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb21a6a1-27da-401e-a05b-6b6eda3fb8fa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.517663] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33eecc4a-e958-457e-9476-617e3ab0d9c8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.532163] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2135.542542] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2135.559847] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2135.560067] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.265s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.555396] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.555771] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.555971] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2138.556149] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2138.575484] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2138.575651] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2138.575872] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2138.576018] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2138.576150] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2138.576273] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2138.576393] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2138.576516] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2139.219849] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.220448] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.220866] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.220066] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.220441] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}}
[ 2144.221026] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2154.220249] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2177.755796] env[62346]: WARNING oslo_vmware.rw_handles [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles response.begin()
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2177.755796] env[62346]: ERROR oslo_vmware.rw_handles
[ 2177.756479] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2177.758378] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2177.758642] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Copying Virtual Disk [datastore2] vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/b720bd03-6036-45e5-a21c-b74f1edafb91/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2177.758965] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-844b186c-7598-49b7-858d-1d2cd6a76715 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2177.768685] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){
[ 2177.768685] env[62346]: value = "task-4891802"
[ 2177.768685] env[62346]: _type = "Task"
[ 2177.768685] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2177.777770] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891802, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2178.281680] env[62346]: DEBUG oslo_vmware.exceptions [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2178.281680] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2178.281680] env[62346]: ERROR nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2178.281680] env[62346]: Faults: ['InvalidArgument']
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Traceback (most recent call last):
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] yield resources
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self.driver.spawn(context, instance, image_meta,
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self._fetch_image_if_missing(context, vi)
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] image_cache(vi, tmp_image_ds_loc)
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] vm_util.copy_virtual_disk(
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] session._wait_for_task(vmdk_copy_task)
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] return self.wait_for_task(task_ref)
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] return evt.wait()
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] result = hub.switch()
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] return self.greenlet.switch()
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self.f(*self.args, **self.kw)
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] raise exceptions.translate_fault(task_info.error)
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Faults: ['InvalidArgument']
[ 2178.281680] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902]
[ 2178.282791] env[62346]: INFO nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Terminating instance
[ 2178.283592] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2178.283823] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2178.284077] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d222a9c-db3a-4060-8c30-db8d9a1f1cc4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2178.286366] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 2178.286558] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2178.287272] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b8caf2-e8d9-45e1-aed1-8d6825bc5872 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2178.294129] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2178.294372] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82b4e2ae-410e-4f2e-8a33-8a69169687c2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2178.296611] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2178.296781] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2178.297737] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ced8b23-a963-43f4-8041-243e151e6ef7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2178.302509] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Waiting for the task: (returnval){
[ 2178.302509] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523035a6-735e-eac0-7774-248df4e40e77"
[ 2178.302509] env[62346]: _type = "Task"
[ 2178.302509] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2178.310575] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523035a6-735e-eac0-7774-248df4e40e77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2178.369082] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2178.369082] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2178.369327] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleting the datastore file [datastore2] 052de992-f28b-4c25-bfbe-3517665f1902 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2178.369545] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7d621b2-6355-484d-ab40-748494c06edf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2178.375336] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){
[ 2178.375336] env[62346]: value = "task-4891804"
[ 2178.375336] env[62346]: _type = "Task"
[ 2178.375336] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2178.383391] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891804, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.812426] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2178.812790] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Creating directory with path [datastore2] vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2178.812865] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d959f69-003d-4a65-9f14-9c02186715d0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.824533] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Created directory with path [datastore2] vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2178.824727] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Fetch image to [datastore2] vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2178.824890] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2178.825655] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06736df3-abf6-4e8b-8e84-701ed2d47ef0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.832285] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f241b93c-af14-446b-8e7a-8e588449f237 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.841440] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647e655c-6b6b-45ca-a17d-c57d5cb61616 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.871235] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cfa571ac-5cf4-4076-bd3f-5ccd51fe9da8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.879853] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5b850cc1-7b8d-4a71-8422-ddcc94d54036 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.886138] env[62346]: DEBUG oslo_vmware.api [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08113} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.886377] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2178.886582] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2178.886751] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2178.886921] env[62346]: INFO nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Took 0.60 seconds to destroy the instance on the hypervisor. 
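The DeleteDatastoreFile_Task records above show the usual oslo.vmware task pattern: submit the call, then poll the task ("Waiting for the task ... progress is 0%") until it reports success with a duration, or faults. The sketch below mirrors that poll loop in plain Python for illustration; TaskInfo and the get_task_info callable are invented stand-ins, not the real oslo.vmware API.

    # Minimal sketch of the poll-until-done loop behind wait_for_task/_poll_task.
    # TaskInfo and get_task_info() are stand-ins for the vSphere task objects.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:                      # stand-in for a vim.TaskInfo
        state: str                       # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a VMware-style task until it leaves the queued/running states."""
        while True:
            info = get_task_info()
            if info.state in ('queued', 'running'):
                time.sleep(poll_interval)   # the "progress is 0%" lines come from here
                continue
            if info.state == 'success':
                return info                 # logged as "completed successfully"
            raise RuntimeError(info.error)  # oslo.vmware raises a translated fault instead

    # toy driver: the task succeeds on the third poll
    _states = iter([TaskInfo('queued'), TaskInfo('running', 40), TaskInfo('success', 100)])
    print(wait_for_task(lambda: next(_states), poll_interval=0).state)

On an error state the real library translates the vSphere fault (as seen earlier with VimFaultException / 'InvalidArgument') rather than raising a bare RuntimeError.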
[ 2178.889094] env[62346]: DEBUG nova.compute.claims [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2178.889313] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.889544] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.905750] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2178.968433] env[62346]: DEBUG oslo_vmware.rw_handles [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2179.035828] env[62346]: DEBUG oslo_vmware.rw_handles [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2179.036017] env[62346]: DEBUG oslo_vmware.rw_handles [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2179.108889] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cc9038-08ea-466a-8c45-93b3ada838d7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.116753] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed051171-3a09-4848-9845-cd356e782e1e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.147639] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f30325c-18f1-4b38-a31f-0fdc487f9788 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.155808] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de9fa3b-fbac-4735-bc82-67056ccd0333 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.169572] env[62346]: DEBUG nova.compute.provider_tree [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2179.178488] env[62346]: DEBUG nova.scheduler.client.report [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2179.194156] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.304s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.194706] env[62346]: ERROR nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.194706] env[62346]: Faults: ['InvalidArgument'] [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Traceback (most recent call last): [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2179.194706] env[62346]: ERROR 
nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self.driver.spawn(context, instance, image_meta, [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self._fetch_image_if_missing(context, vi) [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] image_cache(vi, tmp_image_ds_loc) [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] vm_util.copy_virtual_disk( [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] session._wait_for_task(vmdk_copy_task) [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] return self.wait_for_task(task_ref) [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] return evt.wait() [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] result = hub.switch() [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] return self.greenlet.switch() [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] self.f(*self.args, **self.kw) [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] raise exceptions.translate_fault(task_info.error) [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Faults: ['InvalidArgument'] [ 2179.194706] env[62346]: ERROR nova.compute.manager [instance: 052de992-f28b-4c25-bfbe-3517665f1902] [ 2179.195790] env[62346]: DEBUG nova.compute.utils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2179.197096] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Build of instance 052de992-f28b-4c25-bfbe-3517665f1902 was re-scheduled: A specified parameter was not correct: fileType [ 2179.197096] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2179.197505] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2179.197678] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2179.197851] env[62346]: DEBUG nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2179.198077] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2179.511401] env[62346]: DEBUG nova.network.neutron [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.523154] env[62346]: INFO nova.compute.manager [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Took 0.32 seconds to deallocate network for instance. [ 2179.623432] env[62346]: INFO nova.scheduler.client.report [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted allocations for instance 052de992-f28b-4c25-bfbe-3517665f1902 [ 2179.646033] env[62346]: DEBUG oslo_concurrency.lockutils [None req-53980b2f-2f55-405f-b8a5-b47b94b483dd tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "052de992-f28b-4c25-bfbe-3517665f1902" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 491.357s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.646384] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "052de992-f28b-4c25-bfbe-3517665f1902" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 434.956s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.646783] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] During sync_power_state the instance has a pending task (spawning). Skip. 
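The lockutils records above pair an "acquired ... waited Ns" line with a later "released ... held Ns" line for the same named lock (here the instance lock was held 491.357s across the whole failed build-and-reschedule). Those two numbers are just timings around the two phases of a named lock. A hand-rolled sketch of that bookkeeping, not oslo.concurrency's actual implementation:

    # Illustrative stand-in for the waited/held accounting in the log lines above.
    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}

    @contextmanager
    def timed_lock(name: str):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0        # time spent blocked on other holders
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1      # time the critical section ran
            print(f'Lock "{name}" "released" :: held {held:.3f}s')

    with timed_lock("compute_resources"):
        time.sleep(0.01)                      # stand-in for the critical section

A long "waited" means contention on the name; a long "held" (like the 491s above) means the critical section itself ran long, which is why the sync_power_state task that queued behind it logs a 434.956s wait.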
[ 2179.646783] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "052de992-f28b-4c25-bfbe-3517665f1902" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.647290] env[62346]: DEBUG oslo_concurrency.lockutils [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "052de992-f28b-4c25-bfbe-3517665f1902" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 295.911s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.647508] env[62346]: DEBUG oslo_concurrency.lockutils [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "052de992-f28b-4c25-bfbe-3517665f1902-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.647709] env[62346]: DEBUG oslo_concurrency.lockutils [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "052de992-f28b-4c25-bfbe-3517665f1902-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.647872] env[62346]: DEBUG oslo_concurrency.lockutils [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "052de992-f28b-4c25-bfbe-3517665f1902-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.650448] env[62346]: INFO nova.compute.manager [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Terminating instance [ 2179.652183] env[62346]: DEBUG nova.compute.manager [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2179.652445] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2179.652788] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-847830f8-e964-4cd5-98be-ff10b650fdd2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.663043] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7543803d-6165-45d8-a8c8-0d8d4df2a0c6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.694743] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 052de992-f28b-4c25-bfbe-3517665f1902 could not be found. [ 2179.695041] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2179.695255] env[62346]: INFO nova.compute.manager [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2179.695554] env[62346]: DEBUG oslo.service.loopingcall [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2179.695769] env[62346]: DEBUG nova.compute.manager [-] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2179.695863] env[62346]: DEBUG nova.network.neutron [-] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2179.728813] env[62346]: DEBUG nova.network.neutron [-] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.738095] env[62346]: INFO nova.compute.manager [-] [instance: 052de992-f28b-4c25-bfbe-3517665f1902] Took 0.04 seconds to deallocate network for instance. 
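The WARNING in the record above is worth noting: the second terminate finds no VM on the backend, and the InstanceNotFound is caught and logged rather than propagated, so the destroy still reports "Instance destroyed" and proceeds to network deallocation. That keeps delete idempotent after the first, failed build already cleaned up the backend. A minimal sketch of that control flow, with invented stand-ins for the vCenter lookup and cleanup (not Nova's actual vmops code):

    # Idempotent destroy: a missing backend VM is treated as already gone.
    class InstanceNotFound(Exception):
        pass

    def find_vm(uuid):                        # stand-in for the SearchIndex lookup
        raise InstanceNotFound(f"Instance {uuid} could not be found.")

    def unregister_and_delete(vm):            # stand-in for UnregisterVM + file delete
        pass

    def destroy_instance(uuid):
        try:
            vm = find_vm(uuid)
            unregister_and_delete(vm)
        except InstanceNotFound as exc:
            # warn and continue, mirroring "Instance does not exist on backend"
            print(f"WARNING: Instance does not exist on backend: {exc}")
        print("Instance destroyed")           # network deallocation follows either way

    destroy_instance("052de992-f28b-4c25-bfbe-3517665f1902")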
[ 2179.825675] env[62346]: DEBUG oslo_concurrency.lockutils [None req-25b792d8-80ed-467e-8265-7c7637ad2f82 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "052de992-f28b-4c25-bfbe-3517665f1902" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.921761] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "2511c176-53c8-4b4c-99a7-597215dba604" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.922410] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "2511c176-53c8-4b4c-99a7-597215dba604" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.945906] env[62346]: DEBUG nova.compute.manager [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2184.006465] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.006781] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.008398] env[62346]: INFO nova.compute.claims [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2184.170833] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96980975-3665-41a4-9aae-228c0b5b86a5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.178816] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692f4003-e68c-417c-b8a7-2c41222dd1cf {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.210332] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314ecc99-907a-4d5b-8599-92f1350f62d1 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.218352] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6672357-8b16-41d0-b4f4-e557b17c0e80 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.231974] env[62346]: DEBUG nova.compute.provider_tree [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2184.242772] env[62346]: DEBUG nova.scheduler.client.report [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2184.257424] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.251s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.257928] env[62346]: DEBUG nova.compute.manager [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2184.292885] env[62346]: DEBUG nova.compute.utils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2184.294412] env[62346]: DEBUG nova.compute.manager [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2184.294635] env[62346]: DEBUG nova.network.neutron [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2184.305714] env[62346]: DEBUG nova.compute.manager [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Start building block device mappings for instance. 
{{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2184.363029] env[62346]: DEBUG nova.policy [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b68a54af151441e6b6853c5502518db8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5afa33f3f2b94e68a5161002a9718f78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 2184.382335] env[62346]: DEBUG nova.compute.manager [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Start spawning the instance on the hypervisor. {{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2184.408183] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2184.408460] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2184.408613] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2184.408794] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2184.408939] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2184.409107] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 
tempest-ServersTestJSON-606722541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2184.409316] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2184.409473] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2184.409638] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2184.409799] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2184.409972] env[62346]: DEBUG nova.virt.hardware [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2184.410837] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0200faab-2f4b-4115-aa46-6ff2c2e4ec0b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.419580] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f509b19-9a4e-4851-ae09-21af4ad46ee4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.728570] env[62346]: DEBUG nova.network.neutron [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Successfully created port: 8663d7e6-ac45-4a4d-9140-dfc2c599ec99 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2185.400054] env[62346]: DEBUG nova.compute.manager [req-7fa8276f-3f0e-494e-bff7-4fd82c8ee788 req-217b5962-35c5-4014-a2b0-c00b57f0c456 service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Received event network-vif-plugged-8663d7e6-ac45-4a4d-9140-dfc2c599ec99 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2185.400326] env[62346]: DEBUG oslo_concurrency.lockutils [req-7fa8276f-3f0e-494e-bff7-4fd82c8ee788 req-217b5962-35c5-4014-a2b0-c00b57f0c456 service nova] Acquiring lock "2511c176-53c8-4b4c-99a7-597215dba604-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.400512] env[62346]: DEBUG oslo_concurrency.lockutils [req-7fa8276f-3f0e-494e-bff7-4fd82c8ee788 req-217b5962-35c5-4014-a2b0-c00b57f0c456 service nova] Lock "2511c176-53c8-4b4c-99a7-597215dba604-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.400686] env[62346]: DEBUG oslo_concurrency.lockutils [req-7fa8276f-3f0e-494e-bff7-4fd82c8ee788 req-217b5962-35c5-4014-a2b0-c00b57f0c456 service nova] Lock "2511c176-53c8-4b4c-99a7-597215dba604-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.400855] env[62346]: DEBUG nova.compute.manager [req-7fa8276f-3f0e-494e-bff7-4fd82c8ee788 req-217b5962-35c5-4014-a2b0-c00b57f0c456 service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] No waiting events found dispatching network-vif-plugged-8663d7e6-ac45-4a4d-9140-dfc2c599ec99 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2185.401342] env[62346]: WARNING nova.compute.manager [req-7fa8276f-3f0e-494e-bff7-4fd82c8ee788 req-217b5962-35c5-4014-a2b0-c00b57f0c456 service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Received unexpected event network-vif-plugged-8663d7e6-ac45-4a4d-9140-dfc2c599ec99 for instance with vm_state building and task_state spawning. [ 2185.672358] env[62346]: DEBUG nova.network.neutron [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Successfully updated port: 8663d7e6-ac45-4a4d-9140-dfc2c599ec99 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2185.693532] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "refresh_cache-2511c176-53c8-4b4c-99a7-597215dba604" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2185.693687] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "refresh_cache-2511c176-53c8-4b4c-99a7-597215dba604" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2185.693825] env[62346]: DEBUG nova.network.neutron [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2185.745717] env[62346]: DEBUG nova.network.neutron [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2185.986430] env[62346]: DEBUG nova.network.neutron [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Updating instance_info_cache with network_info: [{"id": "8663d7e6-ac45-4a4d-9140-dfc2c599ec99", "address": "fa:16:3e:5f:7d:a1", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8663d7e6-ac", "ovs_interfaceid": "8663d7e6-ac45-4a4d-9140-dfc2c599ec99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.999830] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "refresh_cache-2511c176-53c8-4b4c-99a7-597215dba604" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2186.000152] env[62346]: DEBUG nova.compute.manager [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Instance network_info: |[{"id": "8663d7e6-ac45-4a4d-9140-dfc2c599ec99", "address": "fa:16:3e:5f:7d:a1", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8663d7e6-ac", "ovs_interfaceid": "8663d7e6-ac45-4a4d-9140-dfc2c599ec99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2186.000580] env[62346]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:7d:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8663d7e6-ac45-4a4d-9140-dfc2c599ec99', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2186.008137] env[62346]: DEBUG oslo.service.loopingcall [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2186.008719] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2186.008963] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c4bdee8-9e5b-4f4c-87b1-f17aff0e8546 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.029939] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2186.029939] env[62346]: value = "task-4891805" [ 2186.029939] env[62346]: _type = "Task" [ 2186.029939] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.038281] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891805, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.540881] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891805, 'name': CreateVM_Task, 'duration_secs': 0.30589} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.541244] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2186.541741] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.541921] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.542260] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2186.542518] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4cdf42d-4185-4b56-97a1-9a724e620853 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.547065] env[62346]: DEBUG oslo_vmware.api [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 2186.547065] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52c5f259-1b92-84ee-1ba2-98b26a01f7a2" [ 2186.547065] env[62346]: _type = "Task" [ 2186.547065] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.554946] env[62346]: DEBUG oslo_vmware.api [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52c5f259-1b92-84ee-1ba2-98b26a01f7a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.057873] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.058159] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2187.058370] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2187.578546] env[62346]: DEBUG nova.compute.manager [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Received event network-changed-8663d7e6-ac45-4a4d-9140-dfc2c599ec99 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2187.578857] env[62346]: DEBUG nova.compute.manager [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Refreshing instance network info cache due to event network-changed-8663d7e6-ac45-4a4d-9140-dfc2c599ec99. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2187.579031] env[62346]: DEBUG oslo_concurrency.lockutils [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] Acquiring lock "refresh_cache-2511c176-53c8-4b4c-99a7-597215dba604" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2187.579105] env[62346]: DEBUG oslo_concurrency.lockutils [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] Acquired lock "refresh_cache-2511c176-53c8-4b4c-99a7-597215dba604" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2187.579290] env[62346]: DEBUG nova.network.neutron [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Refreshing network info cache for port 8663d7e6-ac45-4a4d-9140-dfc2c599ec99 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2187.833293] env[62346]: DEBUG nova.network.neutron [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Updated VIF entry in instance network info cache for port 8663d7e6-ac45-4a4d-9140-dfc2c599ec99. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2187.833725] env[62346]: DEBUG nova.network.neutron [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Updating instance_info_cache with network_info: [{"id": "8663d7e6-ac45-4a4d-9140-dfc2c599ec99", "address": "fa:16:3e:5f:7d:a1", "network": {"id": "7f484a6f-8d5e-461e-9142-2b63240507f8", "bridge": "br-int", "label": "tempest-ServersTestJSON-62332976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5afa33f3f2b94e68a5161002a9718f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8663d7e6-ac", "ovs_interfaceid": "8663d7e6-ac45-4a4d-9140-dfc2c599ec99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.843067] env[62346]: DEBUG oslo_concurrency.lockutils [req-42a9f71d-5aef-4c57-babf-dafeabe640c7 req-bb91dc96-a42f-44c9-aad6-3c1df7c9162e service nova] Releasing lock "refresh_cache-2511c176-53c8-4b4c-99a7-597215dba604" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2195.220839] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2195.232990] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.233247] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.233417] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.233577] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2195.235095] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26919569-5ce2-4e88-aef0-531eae1c237a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.243480] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22bf9c1c-5957-406f-83ac-36f80118a396 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.258698] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9bb6ac-1ec4-499e-b981-58dd7b89f25c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.265277] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f94022-267e-46d2-9df9-6147710a5e2b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.295233] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180591MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2195.295404] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.295624] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.364560] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.364724] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.364856] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.364981] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.365175] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.365269] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 567d2348-be32-4158-a5e0-0a724ca81299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.365390] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2511c176-53c8-4b4c-99a7-597215dba604 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2195.365576] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2195.365727] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '87', 'num_instances': '7', 'num_vm_building': '7', 'num_task_deleting': '5', 'num_os_type_None': '7', 'num_proj_07ea81cc0ce14cb19c28dd7011ca9fd7': '1', 'io_workload': '7', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '2', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_task_spawning': '2', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2195.470737] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1374d335-0409-47fd-b8aa-25c20a125bbb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.478780] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcc6884-f21f-4e68-be36-ea36ebcda2fe {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.509059] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-da8fdc73-da57-4040-87d2-fa401a7f140c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.516834] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198e594c-0a5c-4ad8-982c-fed132374f28 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.530790] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2195.538860] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2195.554766] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2195.554911] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.259s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.555273] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.555645] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2198.555645] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2198.573376] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2198.573533] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2198.573655] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2198.573783] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2198.573903] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2198.574069] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2198.574192] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2198.574321] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2199.235472] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2200.219999] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.219977] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.220756] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.219649] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.219873] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2204.221671] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2212.215747] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2215.220564] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2225.984306] env[62346]: WARNING oslo_vmware.rw_handles [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2225.984306] env[62346]: ERROR 
oslo_vmware.rw_handles response.begin() [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2225.984306] env[62346]: ERROR oslo_vmware.rw_handles [ 2225.985059] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2225.986943] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2225.987239] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Copying Virtual Disk [datastore2] vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/247b9b65-6c60-499e-a794-48c64b6ce62d/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2225.987528] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-232a6127-ed4b-48e9-b558-1033819221e6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.996154] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Waiting for the task: (returnval){ [ 2225.996154] env[62346]: value = "task-4891806" [ 2225.996154] env[62346]: _type = "Task" [ 2225.996154] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.004658] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Task: {'id': task-4891806, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.507221] env[62346]: DEBUG oslo_vmware.exceptions [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2226.507523] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.508093] env[62346]: ERROR nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2226.508093] env[62346]: Faults: ['InvalidArgument'] [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Traceback (most recent call last): [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] yield resources [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self.driver.spawn(context, instance, image_meta, [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self._fetch_image_if_missing(context, vi) [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] image_cache(vi, tmp_image_ds_loc) [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] vm_util.copy_virtual_disk( [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] session._wait_for_task(vmdk_copy_task) [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] return self.wait_for_task(task_ref) [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] return evt.wait() [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] result = hub.switch() [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] return self.greenlet.switch() [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self.f(*self.args, **self.kw) [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] raise exceptions.translate_fault(task_info.error) [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Faults: ['InvalidArgument'] [ 2226.508093] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] [ 2226.508930] env[62346]: INFO nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Terminating instance [ 2226.510052] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2226.510270] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2226.510517] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbf1cfce-ec71-477e-a9f0-3f71e881fe65 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.512829] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2226.513038] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2226.513783] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c8578f-529c-4171-b4f2-468cc98bb218 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.521185] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2226.521442] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b36554eb-87b3-465d-ba69-3131b0244078 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.523973] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2226.524156] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2226.525177] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-146d463e-07b6-457e-80cf-81650bfbefe1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.530427] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 2226.530427] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52e195e0-f811-61c9-d975-2bfa1e329b54" [ 2226.530427] env[62346]: _type = "Task" [ 2226.530427] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.540110] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52e195e0-f811-61c9-d975-2bfa1e329b54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.598272] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2226.598505] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2226.598734] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Deleting the datastore file [datastore2] 0f6433b0-fa14-4546-b4f0-c7c1edf8433e {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2226.599086] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58037b5a-2759-4e41-a1ec-5adfa2124b2e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.606007] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Waiting for the task: (returnval){ [ 2226.606007] env[62346]: value = "task-4891808" [ 2226.606007] env[62346]: _type = "Task" [ 2226.606007] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.615846] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Task: {'id': task-4891808, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.041121] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2227.041544] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating directory with path [datastore2] vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2227.041622] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99d80219-d4be-4545-9d3e-a7b6bf12545e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.053405] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created directory with path [datastore2] vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2227.053605] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Fetch image to [datastore2] vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2227.053762] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2227.054552] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbf947f-b1a7-4bd3-a7c2-72fecc8b2e6e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.061297] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e534972f-a436-4d2c-aeef-68e3b1e8e9f3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.070877] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4063c18-1664-44d2-9409-dd0fb2d8ebde {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.102057] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3826dd0-b831-4b5b-8240-270a44817469 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.111611] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4a2a26b0-6148-4c3e-9124-2fccd10c8949 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.119022] env[62346]: DEBUG oslo_vmware.api [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Task: {'id': task-4891808, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067938} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.119265] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2227.119449] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2227.119634] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2227.119850] env[62346]: INFO nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Took 0.61 seconds to destroy the instance on the hypervisor. 
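Every "Waiting for the task" / "progress is N%" pair in this trace (wait_for_task at oslo_vmware/api.py:397, polled at api.py:434/444) follows the same loop: read the vSphere TaskInfo, report progress while the state is queued or running, return the result on success, and raise a translated fault on error, which is how the CopyVirtualDisk_Task above surfaced the InvalidArgument "fileType" fault. A minimal standalone sketch of that polling pattern, using pyVmomi as a stand-in rather than oslo.vmware's actual implementation (the `task` handle, interval, and print format are illustrative assumptions):

    import time

    def wait_for_vsphere_task(task, poll_interval=0.5):
        # Illustrative re-implementation of the poll loop behind the
        # "Task: {...} progress is N%" lines above; `task` is assumed
        # to be a pyVmomi vim.Task, e.g. returned by CopyVirtualDisk_Task.
        while True:
            info = task.info
            if info.state in ('queued', 'running'):
                print('Task %s progress is %s%%' % (info.key, info.progress or 0))
                time.sleep(poll_interval)
            elif info.state == 'success':
                return info.result
            else:
                # 'error' state: in the trace above this is where
                # "A specified parameter was not correct: fileType"
                # was raised and later translated to VimFaultException.
                raise RuntimeError(info.error.localizedMessage)
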
[ 2227.122116] env[62346]: DEBUG nova.compute.claims [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2227.122280] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2227.122518] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.134177] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2227.187505] env[62346]: DEBUG oslo_vmware.rw_handles [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2227.246747] env[62346]: DEBUG oslo_vmware.rw_handles [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2227.246945] env[62346]: DEBUG oslo_vmware.rw_handles [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2227.311935] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5130ab2-1bea-4212-8ca7-446aa614150a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.320255] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494b5eba-1e2f-492e-a8a1-4fb40a38f183 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.350644] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8e7f61-a517-411e-bb21-30fe56f0e795 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.358432] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d032ab-700b-4d10-a349-930a00cf3916 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.372084] env[62346]: DEBUG nova.compute.provider_tree [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2227.380683] env[62346]: DEBUG nova.scheduler.client.report [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2227.395044] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.272s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.395668] env[62346]: ERROR nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2227.395668] env[62346]: Faults: ['InvalidArgument'] [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Traceback (most recent call last): [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2227.395668] 
env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self.driver.spawn(context, instance, image_meta, [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self._fetch_image_if_missing(context, vi) [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] image_cache(vi, tmp_image_ds_loc) [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] vm_util.copy_virtual_disk( [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] session._wait_for_task(vmdk_copy_task) [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] return self.wait_for_task(task_ref) [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] return evt.wait() [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] result = hub.switch() [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] return self.greenlet.switch() [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] self.f(*self.args, **self.kw) [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] raise exceptions.translate_fault(task_info.error) [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Faults: ['InvalidArgument'] [ 2227.395668] env[62346]: ERROR nova.compute.manager [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] [ 2227.396550] env[62346]: DEBUG nova.compute.utils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2227.397922] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Build of instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e was re-scheduled: A specified parameter was not correct: fileType [ 2227.397922] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2227.398325] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2227.398497] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2227.398670] env[62346]: DEBUG nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2227.398833] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2227.840713] env[62346]: DEBUG nova.network.neutron [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2227.852829] env[62346]: INFO nova.compute.manager [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Took 0.45 seconds to deallocate network for instance. [ 2227.951433] env[62346]: INFO nova.scheduler.client.report [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Deleted allocations for instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e [ 2227.974582] env[62346]: DEBUG oslo_concurrency.lockutils [None req-ed32e506-dddd-4a9a-977f-fa24884fb069 tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 485.578s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.974899] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 483.284s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.975088] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] During sync_power_state the instance has a pending task (spawning). Skip. 
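Every Acquiring/acquired/released triple in this trace, including the build lock held for 485.578s above, is emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two forms seen here, the decorator and the context manager (lock names copied from the log; the function bodies are placeholders):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with the in-process lock held; entry and exit produce the
        # 'Lock ... acquired ... waited Ns' / '"released" ... held Ns'
        # DEBUG lines seen throughout this trace.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form, matching the "refresh_cache-<uuid>" lines.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass
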
[ 2227.975267] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.975923] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 289.786s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.976213] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Acquiring lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2227.976505] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.976738] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.979054] env[62346]: INFO nova.compute.manager [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Terminating instance [ 2227.984891] env[62346]: DEBUG nova.compute.manager [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2227.985105] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2227.985398] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-011cf4ee-91be-4b27-94f3-14e0a1040968 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.997210] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd8ece5-601b-42db-b5a4-d94060fcdf60 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.032310] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0f6433b0-fa14-4546-b4f0-c7c1edf8433e could not be found. [ 2228.032565] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2228.032778] env[62346]: INFO nova.compute.manager [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2228.033092] env[62346]: DEBUG oslo.service.loopingcall [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2228.033403] env[62346]: DEBUG nova.compute.manager [-] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2228.033511] env[62346]: DEBUG nova.network.neutron [-] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2228.060431] env[62346]: DEBUG nova.network.neutron [-] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2228.070367] env[62346]: INFO nova.compute.manager [-] [instance: 0f6433b0-fa14-4546-b4f0-c7c1edf8433e] Took 0.04 seconds to deallocate network for instance. 
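(Editor's note: the destroy path above raises InstanceNotFound from the backend lookup, logs a WARNING, and still proceeds to "Instance destroyed" and network deallocation. Below is a hedged, self-contained sketch of that catch-and-continue pattern; find_vm_ref and unregister_vm are simplified stand-ins for the SearchIndex.FindAllByUuid and VirtualMachine.UnregisterVM calls seen in the log, not the real Nova internals.)

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('destroy-sketch')


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy(instance_uuid, find_vm_ref, unregister_vm):
    try:
        vm_ref = find_vm_ref(instance_uuid)
        unregister_vm(vm_ref)
    except InstanceNotFound:
        # Matches the WARNING above: a VM missing on the backend is
        # treated as already gone rather than failing the teardown.
        LOG.warning('Instance %s does not exist on backend', instance_uuid)
    LOG.info('Instance destroyed')


def _missing(uuid):
    raise InstanceNotFound(uuid)


# Demonstrates the WARNING path from the log: lookup fails, destroy
# still completes so network deallocation can follow.
destroy('0f6433b0-fa14-4546-b4f0-c7c1edf8433e', _missing, lambda ref: None)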
[ 2228.173235] env[62346]: DEBUG oslo_concurrency.lockutils [None req-6a35634b-26a2-43ee-8f56-3b0603e6f11b tempest-InstanceActionsTestJSON-1047516123 tempest-InstanceActionsTestJSON-1047516123-project-member] Lock "0f6433b0-fa14-4546-b4f0-c7c1edf8433e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.608813] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquiring lock "1f945968-d3bb-4895-b084-373e02684bea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2243.609212] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Lock "1f945968-d3bb-4895-b084-373e02684bea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2243.620375] env[62346]: DEBUG nova.compute.manager [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2243.671365] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2243.671613] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2243.673156] env[62346]: INFO nova.compute.claims [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2243.809109] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4805b3-50f5-4564-aa60-94a96fe957b8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.818190] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261fe612-51c3-4a6e-b0be-246ac562d172 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.850719] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19dc60b-5ab0-464d-a96b-caabb18b25bb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.858342] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de91a52-c681-4edf-9446-bcb519df9f5f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.872631] env[62346]: DEBUG nova.compute.provider_tree [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2243.882892] env[62346]: DEBUG nova.scheduler.client.report [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2243.896806] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.225s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.897307] env[62346]: DEBUG nova.compute.manager [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2243.932023] env[62346]: DEBUG nova.compute.utils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2243.933615] env[62346]: DEBUG nova.compute.manager [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Allocating IP information in the background. 
{{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2243.933881] env[62346]: DEBUG nova.network.neutron [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2243.959684] env[62346]: DEBUG nova.compute.manager [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2244.004371] env[62346]: DEBUG nova.policy [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea36a14869d24f2d81f82e4b622bbdc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '876110afb8a6448fa8c62675fcfc8180', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 2244.030382] env[62346]: DEBUG nova.compute.manager [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2244.060991] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2244.061281] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2244.061449] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2244.061641] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2244.061831] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2244.062171] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2244.062403] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2244.062582] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2244.062945] env[62346]: DEBUG nova.virt.hardware [None 
req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2244.063166] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2244.063353] env[62346]: DEBUG nova.virt.hardware [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2244.064352] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb02d8b3-d131-4901-be06-d5b8bc6525fc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.073715] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a800d4c-f745-42bf-a2cc-e26ede8f8e4d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.353175] env[62346]: DEBUG nova.network.neutron [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Successfully created port: ae675124-1abf-49c8-af51-334d28512a18 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2244.942819] env[62346]: DEBUG nova.compute.manager [req-babbe96f-a720-417d-ad81-c9e766b62489 req-101d280f-df06-4658-9f72-33ac3eff7e3c service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Received event network-vif-plugged-ae675124-1abf-49c8-af51-334d28512a18 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2244.943269] env[62346]: DEBUG oslo_concurrency.lockutils [req-babbe96f-a720-417d-ad81-c9e766b62489 req-101d280f-df06-4658-9f72-33ac3eff7e3c service nova] Acquiring lock "1f945968-d3bb-4895-b084-373e02684bea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.943592] env[62346]: DEBUG oslo_concurrency.lockutils [req-babbe96f-a720-417d-ad81-c9e766b62489 req-101d280f-df06-4658-9f72-33ac3eff7e3c service nova] Lock "1f945968-d3bb-4895-b084-373e02684bea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.943592] env[62346]: DEBUG oslo_concurrency.lockutils [req-babbe96f-a720-417d-ad81-c9e766b62489 req-101d280f-df06-4658-9f72-33ac3eff7e3c service nova] Lock "1f945968-d3bb-4895-b084-373e02684bea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.943736] env[62346]: DEBUG nova.compute.manager 
[req-babbe96f-a720-417d-ad81-c9e766b62489 req-101d280f-df06-4658-9f72-33ac3eff7e3c service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] No waiting events found dispatching network-vif-plugged-ae675124-1abf-49c8-af51-334d28512a18 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2244.943956] env[62346]: WARNING nova.compute.manager [req-babbe96f-a720-417d-ad81-c9e766b62489 req-101d280f-df06-4658-9f72-33ac3eff7e3c service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Received unexpected event network-vif-plugged-ae675124-1abf-49c8-af51-334d28512a18 for instance with vm_state building and task_state spawning. [ 2245.039238] env[62346]: DEBUG nova.network.neutron [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Successfully updated port: ae675124-1abf-49c8-af51-334d28512a18 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2245.053703] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquiring lock "refresh_cache-1f945968-d3bb-4895-b084-373e02684bea" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.053703] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquired lock "refresh_cache-1f945968-d3bb-4895-b084-373e02684bea" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.053703] env[62346]: DEBUG nova.network.neutron [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2245.109202] env[62346]: DEBUG nova.network.neutron [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Instance cache missing network info. 
{{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2245.304758] env[62346]: DEBUG nova.network.neutron [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Updating instance_info_cache with network_info: [{"id": "ae675124-1abf-49c8-af51-334d28512a18", "address": "fa:16:3e:f8:ad:82", "network": {"id": "fc5171ba-c633-48ea-a27c-c50db547fa4b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1765126240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "876110afb8a6448fa8c62675fcfc8180", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae675124-1a", "ovs_interfaceid": "ae675124-1abf-49c8-af51-334d28512a18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.320150] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Releasing lock "refresh_cache-1f945968-d3bb-4895-b084-373e02684bea" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.320486] env[62346]: DEBUG nova.compute.manager [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Instance network_info: |[{"id": "ae675124-1abf-49c8-af51-334d28512a18", "address": "fa:16:3e:f8:ad:82", "network": {"id": "fc5171ba-c633-48ea-a27c-c50db547fa4b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1765126240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "876110afb8a6448fa8c62675fcfc8180", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae675124-1a", "ovs_interfaceid": "ae675124-1abf-49c8-af51-334d28512a18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2245.320873] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:ad:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae675124-1abf-49c8-af51-334d28512a18', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2245.328297] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Creating folder: Project (876110afb8a6448fa8c62675fcfc8180). Parent ref: group-v953204. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2245.328846] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4d5a255-316b-42de-9ea3-9d11371cbd94 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.340583] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Created folder: Project (876110afb8a6448fa8c62675fcfc8180) in parent group-v953204. [ 2245.340775] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Creating folder: Instances. Parent ref: group-v953322. {{(pid=62346) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2245.341042] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d047f12-b965-4b85-8d66-8f79b49c9d6f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.350736] env[62346]: INFO nova.virt.vmwareapi.vm_util [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Created folder: Instances in parent group-v953322. [ 2245.350977] env[62346]: DEBUG oslo.service.loopingcall [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2245.351182] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2245.351392] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b74ab27b-7e8b-41bd-9779-9fe54df9c6f8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.371951] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2245.371951] env[62346]: value = "task-4891811" [ 2245.371951] env[62346]: _type = "Task" [ 2245.371951] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.379704] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891811, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.882559] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891811, 'name': CreateVM_Task, 'duration_secs': 0.314399} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.882775] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2245.883426] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.883589] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.883913] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2245.884190] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1455b442-5a85-4b5e-91dc-c5dd2104da3c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.889118] env[62346]: DEBUG oslo_vmware.api [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Waiting for the task: (returnval){ [ 2245.889118] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52359e59-634f-6ae9-e5c5-5e6e8d426536" [ 2245.889118] env[62346]: _type = "Task" [ 2245.889118] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.896932] env[62346]: DEBUG oslo_vmware.api [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52359e59-634f-6ae9-e5c5-5e6e8d426536, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.399837] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2246.400219] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2246.400284] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3f37e905-0252-4192-85aa-d1ef7a191895 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2246.974598] env[62346]: DEBUG nova.compute.manager [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Received event network-changed-ae675124-1abf-49c8-af51-334d28512a18 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2246.974935] env[62346]: DEBUG nova.compute.manager [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Refreshing instance network info cache due to event network-changed-ae675124-1abf-49c8-af51-334d28512a18. {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2246.975204] env[62346]: DEBUG oslo_concurrency.lockutils [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] Acquiring lock "refresh_cache-1f945968-d3bb-4895-b084-373e02684bea" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2246.975357] env[62346]: DEBUG oslo_concurrency.lockutils [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] Acquired lock "refresh_cache-1f945968-d3bb-4895-b084-373e02684bea" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2246.975526] env[62346]: DEBUG nova.network.neutron [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Refreshing network info cache for port ae675124-1abf-49c8-af51-334d28512a18 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2247.235873] env[62346]: DEBUG nova.network.neutron [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Updated VIF entry in instance network info cache for port ae675124-1abf-49c8-af51-334d28512a18. 
{{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2247.236256] env[62346]: DEBUG nova.network.neutron [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Updating instance_info_cache with network_info: [{"id": "ae675124-1abf-49c8-af51-334d28512a18", "address": "fa:16:3e:f8:ad:82", "network": {"id": "fc5171ba-c633-48ea-a27c-c50db547fa4b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1765126240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "876110afb8a6448fa8c62675fcfc8180", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae675124-1a", "ovs_interfaceid": "ae675124-1abf-49c8-af51-334d28512a18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2247.247456] env[62346]: DEBUG oslo_concurrency.lockutils [req-39f37e22-8167-4fa3-8e57-45ae3d6a332e req-fcb7425b-632f-4fa8-86f0-caea2a1dd06d service nova] Releasing lock "refresh_cache-1f945968-d3bb-4895-b084-373e02684bea" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2255.220149] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2255.232208] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.232441] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2255.232631] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2255.232844] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2255.234302] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88ceffe-4085-491e-a307-089d62b0a02e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.242870] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4706c6-b98b-4890-9a10-916690a0dcd0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.256797] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f29405-294e-4167-9740-6e4f82ea02dd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.263489] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f4bc1e-9176-42c1-84be-2094a8fd5b23 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.292882] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180574MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2255.293048] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.293225] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2255.361865] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance ca0f017f-3bca-401f-8e70-83a7a5061116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2255.362041] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2255.362174] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2255.362295] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2255.362411] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 567d2348-be32-4158-a5e0-0a724ca81299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2255.362524] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2511c176-53c8-4b4c-99a7-597215dba604 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2255.362665] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1f945968-d3bb-4895-b084-373e02684bea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2255.362852] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2255.363027] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '88', 'num_instances': '7', 'num_vm_building': '7', 'num_task_deleting': '4', 'num_os_type_None': '7', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '2', 'io_workload': '7', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_task_spawning': '3', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_proj_876110afb8a6448fa8c62675fcfc8180': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2255.453029] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39565fd3-1385-453a-af4c-35b07ee4f4c7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.460788] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a83e5a4-0254-4fd1-bb4a-593d7764e041 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.489849] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-13114d50-2148-46a6-9ce7-840c43f6c901 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.497054] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cef229c-3a95-4209-90f8-23eb07f6bc46 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.510935] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2255.521434] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2255.536679] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2255.536865] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.244s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2259.532584] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.219962] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.220211] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2260.220381] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2260.239270] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2260.239426] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2260.239557] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2260.239684] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2260.239807] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2260.239927] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2260.240056] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2260.240179] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2262.220728] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2262.221127] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.220508] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.220886] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.220886] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2266.221561] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.787653] env[62346]: WARNING oslo_vmware.rw_handles [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2272.787653] env[62346]: ERROR oslo_vmware.rw_handles [ 2272.788389] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 
ca0f017f-3bca-401f-8e70-83a7a5061116] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2272.789963] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2272.790218] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Copying Virtual Disk [datastore2] vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/0feeff29-c55d-427e-a5ef-c7eef245670f/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2272.790504] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b358f5a-6027-4c1d-a9ce-2cdf63f5a157 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.799751] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 2272.799751] env[62346]: value = "task-4891812" [ 2272.799751] env[62346]: _type = "Task" [ 2272.799751] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.809059] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': task-4891812, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.311126] env[62346]: DEBUG oslo_vmware.exceptions [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2273.311126] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2273.311342] env[62346]: ERROR nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2273.311342] env[62346]: Faults: ['InvalidArgument'] [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Traceback (most recent call last): [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] yield resources [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] self.driver.spawn(context, instance, image_meta, [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] self._fetch_image_if_missing(context, vi) [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] image_cache(vi, tmp_image_ds_loc) [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] vm_util.copy_virtual_disk( [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] session._wait_for_task(vmdk_copy_task) [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] return self.wait_for_task(task_ref) [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] return evt.wait() [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] result = hub.switch() [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] return self.greenlet.switch() [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] self.f(*self.args, **self.kw) [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] raise exceptions.translate_fault(task_info.error) [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Faults: ['InvalidArgument'] [ 2273.311342] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] [ 2273.312354] env[62346]: INFO nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Terminating instance [ 2273.313296] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2273.313500] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2273.313749] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea1c31b5-3848-42f4-a011-b0fefc27bdeb {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.315990] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2273.316199] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2273.316915] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068f77fc-238a-4d8a-8486-9d10ab538dee {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.323949] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2273.324184] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68147437-f6ab-4ee1-b0b1-a4fdcd13f58f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.326457] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2273.326630] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2273.327630] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f55b94be-1abd-460e-89d6-8017e3bb2ae6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.332539] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Waiting for the task: (returnval){ [ 2273.332539] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]528505ac-064c-58f4-b95d-e8c4db2e4d63" [ 2273.332539] env[62346]: _type = "Task" [ 2273.332539] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.340671] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]528505ac-064c-58f4-b95d-e8c4db2e4d63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.408233] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2273.408455] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2273.408606] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Deleting the datastore file [datastore2] ca0f017f-3bca-401f-8e70-83a7a5061116 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2273.408881] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8971f7e0-7a32-4e6a-a979-bca150ea8466 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.415624] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 2273.415624] env[62346]: value = "task-4891814" [ 2273.415624] env[62346]: _type = "Task" [ 2273.415624] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2273.424017] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': task-4891814, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.843980] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2273.844364] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Creating directory with path [datastore2] vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2273.844420] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0e3b3af-2b5d-403b-b133-519f0c154215 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.856179] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Created directory with path [datastore2] vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2273.856373] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Fetch image to [datastore2] vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2273.856544] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2273.857317] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefa0936-3de2-4933-aa76-8f985cfccd9d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.864280] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d625ea66-8286-4d79-930b-f5afeee9570e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.873503] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adde0fd0-5c9a-49dc-ba6b-0db93e4e366f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.905116] env[62346]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285950b4-af52-4738-bf58-a818f748b2ff {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.911948] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-14641752-be3f-481e-a39f-d8c9f692f015 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.924354] env[62346]: DEBUG oslo_vmware.api [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': task-4891814, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070518} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.924656] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2273.924976] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2273.925306] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2273.925598] env[62346]: INFO nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Took 0.61 seconds to destroy the instance on the hypervisor. 
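The records above show the driver's vCenter task pattern end to end: a task is started (here FileManager.DeleteDatastoreFile_Task), its returnval is logged, and oslo.vmware polls the task until it completes. A minimal sketch of that pattern, assuming only the documented oslo.vmware session API; the host, credentials, and datastore path below are illustrative placeholders, not values taken from this log:

    # Sketch of the start-task / wait-for-task pattern seen in the records
    # above. Host, credentials, and the datastore path are placeholders.
    from oslo_vmware import api
    from oslo_vmware import exceptions

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # FileManager.DeleteDatastoreFile_Task returns a task reference.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] ca0f017f-3bca-401f-8e70-83a7a5061116',
        datacenter=None)  # simplification; Nova passes a real datacenter ref

    try:
        # wait_for_task() polls task.info on an interval (the repeated
        # "progress is 0%" records) until the task reaches 'success', or
        # raises once it reaches 'error'. When the fault name carried by
        # the error has no specific exception class registered (the "Fault
        # InvalidArgument not matched" records), a generic VimFaultException
        # is raised instead.
        session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # e.fault_list names the fault, e.g. ['InvalidArgument']; its
        # message here is "A specified parameter was not correct: fileType".
        raise

This is the same exception that aborted the CopyVirtualDisk_Task spawn earlier in the trace; in the records that follow, Nova aborts the resource claim, reports "Failed to build and run instance", and re-schedules the build.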
[ 2273.927958] env[62346]: DEBUG nova.compute.claims [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2273.928159] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2273.928380] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2273.936776] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2273.990587] env[62346]: DEBUG oslo_vmware.rw_handles [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2274.053426] env[62346]: DEBUG oslo_vmware.rw_handles [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2274.053691] env[62346]: DEBUG oslo_vmware.rw_handles [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2274.120546] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51ce7cd-ef4c-4914-a940-8d8fee45e619 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.128529] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8c6181-f2eb-4c75-af07-a6e341ee43ad {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.158261] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33c792d-2a33-4c53-ba0e-b984fdcf2b0c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.167159] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb47299-fe75-49cd-93ee-b39ebcf084d9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.182408] env[62346]: DEBUG nova.compute.provider_tree [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2274.193979] env[62346]: DEBUG nova.scheduler.client.report [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2274.210220] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.282s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.210779] env[62346]: ERROR nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2274.210779] env[62346]: Faults: ['InvalidArgument'] [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Traceback (most recent call last): [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: 
ca0f017f-3bca-401f-8e70-83a7a5061116] self.driver.spawn(context, instance, image_meta, [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] self._fetch_image_if_missing(context, vi) [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] image_cache(vi, tmp_image_ds_loc) [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] vm_util.copy_virtual_disk( [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] session._wait_for_task(vmdk_copy_task) [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] return self.wait_for_task(task_ref) [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] return evt.wait() [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] result = hub.switch() [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] return self.greenlet.switch() [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] self.f(*self.args, **self.kw) [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] raise exceptions.translate_fault(task_info.error) [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Faults: ['InvalidArgument'] [ 2274.210779] env[62346]: ERROR nova.compute.manager [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] [ 2274.211628] env[62346]: DEBUG nova.compute.utils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2274.213269] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Build of instance ca0f017f-3bca-401f-8e70-83a7a5061116 was re-scheduled: A specified parameter was not correct: fileType [ 2274.213269] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2274.213665] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2274.213840] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2274.214049] env[62346]: DEBUG nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2274.214250] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2274.513712] env[62346]: DEBUG nova.network.neutron [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2274.529683] env[62346]: INFO nova.compute.manager [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Took 0.31 seconds to deallocate network for instance. [ 2274.659237] env[62346]: INFO nova.scheduler.client.report [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Deleted allocations for instance ca0f017f-3bca-401f-8e70-83a7a5061116 [ 2274.687699] env[62346]: DEBUG oslo_concurrency.lockutils [None req-09747eb1-bdae-4e0a-91fb-f9dee89d6d15 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 484.492s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.687977] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 288.280s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.688249] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "ca0f017f-3bca-401f-8e70-83a7a5061116-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.688463] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.688661] env[62346]:
DEBUG oslo_concurrency.lockutils [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.690968] env[62346]: INFO nova.compute.manager [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Terminating instance [ 2274.692774] env[62346]: DEBUG nova.compute.manager [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2274.692971] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2274.693477] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f0783ca-7ddf-4abe-95e8-cc2c92ebc80f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.703318] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005baac6-9d19-4f6f-9e89-507fbb447beb {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.732947] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ca0f017f-3bca-401f-8e70-83a7a5061116 could not be found. [ 2274.733155] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2274.733507] env[62346]: INFO nova.compute.manager [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2274.733711] env[62346]: DEBUG oslo.service.loopingcall [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2274.733903] env[62346]: DEBUG nova.compute.manager [-] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2274.734008] env[62346]: DEBUG nova.network.neutron [-] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2274.795336] env[62346]: DEBUG nova.network.neutron [-] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2274.804655] env[62346]: INFO nova.compute.manager [-] [instance: ca0f017f-3bca-401f-8e70-83a7a5061116] Took 0.07 seconds to deallocate network for instance. [ 2274.918327] env[62346]: DEBUG oslo_concurrency.lockutils [None req-e4f2f8f5-7050-4163-9010-134c7e0df908 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Lock "ca0f017f-3bca-401f-8e70-83a7a5061116" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.230s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2275.578365] env[62346]: DEBUG oslo_concurrency.lockutils [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "567d2348-be32-4158-a5e0-0a724ca81299" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2277.220325] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.220217] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.233586] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.233812] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.233971] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.234144] env[62346]: DEBUG nova.compute.resource_tracker [None
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2317.235481] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6e9c1d-fb70-4496-b1a0-01369b0a47b8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.245048] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc2f9a1-a9e9-42df-a4ef-9f770cb5cb92 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.259361] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1492d3cc-f407-426f-b9be-bea55c8cdb30 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.266534] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac06151-436f-4502-b300-b97a4ab95891 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.301092] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180528MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2317.301092] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.301092] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.376083] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.376258] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.376389] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.376514] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 567d2348-be32-4158-a5e0-0a724ca81299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.376630] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2511c176-53c8-4b4c-99a7-597215dba604 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.376745] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1f945968-d3bb-4895-b084-373e02684bea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2317.376934] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2317.377101] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] stats={'failed_builds': '89', 'num_instances': '6', 'num_vm_building': '6', 'num_task_deleting': '4', 'num_os_type_None': '6', 'num_proj_73789014fd6240a893858419fd97d5b2': '1', 'io_workload': '6', 'num_proj_fc3387b28c6e4b6d80fbe6cb9f955fe1': '1', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_task_spawning': '2', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_876110afb8a6448fa8c62675fcfc8180': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2317.458394] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1921f769-0b64-47cf-9c64-8e9c7fc444b6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.466450] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03add7e-8ad6-4afe-a3da-16d5e9f11280 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.497520] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052c5d63-c765-4709-8e71-6d2a08ae2d99 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.505440] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a2e45a-6b73-4335-941f-9efb12fb3abe {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.518792] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2317.529074] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2317.543673] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2317.543869] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.243s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2319.539624] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.220627] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.220811] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2320.220853] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2320.236865] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2320.237037] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Skipping network cache update for instance because it is Building. 
{{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2320.237167] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2320.237295] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2320.237421] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2320.237545] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2320.237670] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2321.099073] env[62346]: WARNING oslo_vmware.rw_handles [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2321.099073] env[62346]: ERROR oslo_vmware.rw_handles [ 2321.099719] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to 
vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2321.101589] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2321.101852] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Copying Virtual Disk [datastore2] vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/56f1d497-e361-4444-acf8-ef26ff550df3/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2321.102151] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42ea3ed9-d4ed-445d-89aa-6a2abdf2499a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.112316] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Waiting for the task: (returnval){ [ 2321.112316] env[62346]: value = "task-4891815" [ 2321.112316] env[62346]: _type = "Task" [ 2321.112316] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.120148] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Task: {'id': task-4891815, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2321.622402] env[62346]: DEBUG oslo_vmware.exceptions [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2321.622708] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2321.623222] env[62346]: ERROR nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2321.623222] env[62346]: Faults: ['InvalidArgument'] [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Traceback (most recent call last): [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] yield resources [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self.driver.spawn(context, instance, image_meta, [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self._fetch_image_if_missing(context, vi) [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] image_cache(vi, tmp_image_ds_loc) [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] vm_util.copy_virtual_disk( [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] session._wait_for_task(vmdk_copy_task) [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] return self.wait_for_task(task_ref) [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] return evt.wait() [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] result = hub.switch() [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] return self.greenlet.switch() [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self.f(*self.args, **self.kw) [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] raise exceptions.translate_fault(task_info.error) [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Faults: ['InvalidArgument'] [ 2321.623222] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] [ 2321.624074] env[62346]: INFO nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Terminating instance [ 2321.625103] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2321.625305] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2321.625565] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cd3eb3e-0d0e-401e-9192-d41131bfab68 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.627727] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2321.627938] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2321.628649] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50d768a-cb4c-4cbc-925f-2bc5bda6904c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.635614] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2321.635832] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d329d7cb-7430-4a13-88a1-a2416dfd4bf4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.637923] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2321.638104] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2321.639082] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa18ff1-9f5d-4cad-b88d-66466a8463e8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.644342] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Waiting for the task: (returnval){ [ 2321.644342] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523d67ab-3eef-af83-340a-8ed3d1e56269" [ 2321.644342] env[62346]: _type = "Task" [ 2321.644342] env[62346]: } to complete. 
{{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.652111] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]523d67ab-3eef-af83-340a-8ed3d1e56269, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2321.713779] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2321.713979] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2321.714178] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Deleting the datastore file [datastore2] e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2321.714441] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d8dd779-c8b3-4ef3-814c-3615eb13fcf3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.720915] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Waiting for the task: (returnval){ [ 2321.720915] env[62346]: value = "task-4891817" [ 2321.720915] env[62346]: _type = "Task" [ 2321.720915] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.729442] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Task: {'id': task-4891817, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.155434] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2322.155828] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Creating directory with path [datastore2] vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2322.155945] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aba6b11f-a0d7-4d4b-9fa2-56048fd252de {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.168099] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Created directory with path [datastore2] vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2322.168291] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Fetch image to [datastore2] vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2322.168468] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2322.169950] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0323960-752c-49bf-b1fb-eea61b98e694 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.176568] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b544caaf-51af-4cb5-ba10-7126c4b466c3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.186542] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec71bdd8-6d7c-4d02-b160-554f14c9538e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.217112] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d9f220-2c18-485f-8a06-e8003fca884e {{(pid=62346) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.227076] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2ac11557-27a4-407e-ba5e-dc0218aad3fd {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.235634] env[62346]: DEBUG oslo_vmware.api [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Task: {'id': task-4891817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066507} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2322.235912] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2322.236110] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2322.236281] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2322.236452] env[62346]: INFO nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2322.238779] env[62346]: DEBUG nova.compute.claims [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2322.238954] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2322.239180] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2322.251967] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2322.305573] env[62346]: DEBUG oslo_vmware.rw_handles [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2322.365654] env[62346]: DEBUG oslo_vmware.rw_handles [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2322.365846] env[62346]: DEBUG oslo_vmware.rw_handles [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2322.407166] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4435a651-cada-4008-b7bc-95064fc57411 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.414959] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31eb650-03d4-45c5-a307-2b33e45225a2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.444520] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647f43dc-252b-42e8-b6db-d8142f842ad6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.452063] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1891edd6-d240-45c7-83c3-b8bb676470c4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.466076] env[62346]: DEBUG nova.compute.provider_tree [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2322.474770] env[62346]: DEBUG nova.scheduler.client.report [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2322.488473] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.249s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.489049] env[62346]: ERROR nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2322.489049] env[62346]: Faults: ['InvalidArgument'] [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Traceback (most recent call last): [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self.driver.spawn(context, instance, image_meta, [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self._fetch_image_if_missing(context, vi) [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] image_cache(vi, tmp_image_ds_loc) [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] vm_util.copy_virtual_disk( [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] session._wait_for_task(vmdk_copy_task) [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] return self.wait_for_task(task_ref) [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] return evt.wait() [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] result = hub.switch() [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] return self.greenlet.switch() [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] self.f(*self.args, **self.kw) [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: 
e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] raise exceptions.translate_fault(task_info.error) [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Faults: ['InvalidArgument'] [ 2322.489049] env[62346]: ERROR nova.compute.manager [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] [ 2322.489950] env[62346]: DEBUG nova.compute.utils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2322.491555] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Build of instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf was re-scheduled: A specified parameter was not correct: fileType [ 2322.491555] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2322.491946] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2322.492120] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2322.492291] env[62346]: DEBUG nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2322.492459] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2322.841952] env[62346]: DEBUG nova.network.neutron [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2322.854671] env[62346]: INFO nova.compute.manager [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Took 0.36 seconds to deallocate network for instance. [ 2322.956725] env[62346]: INFO nova.scheduler.client.report [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Deleted allocations for instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf [ 2322.980573] env[62346]: DEBUG oslo_concurrency.lockutils [None req-1fe6c89f-5945-4c2d-a4dd-9bc174903b87 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 524.674s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.980710] env[62346]: DEBUG oslo_concurrency.lockutils [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 329.080s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2322.980828] env[62346]: DEBUG oslo_concurrency.lockutils [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Acquiring lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2322.981079] env[62346]: DEBUG oslo_concurrency.lockutils [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock 
"e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2322.981261] env[62346]: DEBUG oslo_concurrency.lockutils [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.983529] env[62346]: INFO nova.compute.manager [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Terminating instance [ 2322.986161] env[62346]: DEBUG nova.compute.manager [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2322.986360] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2322.986651] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09d553d3-13b1-4c8d-8539-666cfe114dd1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.999356] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed91972-23e7-4e86-994e-110810c410af {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.029840] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf could not be found. [ 2323.030111] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2323.030304] env[62346]: INFO nova.compute.manager [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2323.030556] env[62346]: DEBUG oslo.service.loopingcall [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2323.030844] env[62346]: DEBUG nova.compute.manager [-] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2323.030938] env[62346]: DEBUG nova.network.neutron [-] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2323.059359] env[62346]: DEBUG nova.network.neutron [-] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2323.068450] env[62346]: INFO nova.compute.manager [-] [instance: e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf] Took 0.04 seconds to deallocate network for instance. [ 2323.163336] env[62346]: DEBUG oslo_concurrency.lockutils [None req-54f1c91f-de77-441d-b671-4802cf941468 tempest-ServerMetadataNegativeTestJSON-248257229 tempest-ServerMetadataNegativeTestJSON-248257229-project-member] Lock "e9c2d2d8-a219-4ec2-b0c8-10f4ab60ecdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2323.219558] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2323.219785] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.220179] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.220568] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2326.220736] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2326.221056] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2326.221164] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2326.231035] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] There are 0 instances to clean {{(pid=62346) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2327.230777] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.658324] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.658753] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Getting list of instances from cluster (obj){ [ 2331.658753] env[62346]: value = "domain-c8" [ 2331.658753] env[62346]: _type = "ClusterComputeResource" [ 2331.658753] env[62346]: } {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2331.659826] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb454d8-7be2-4513-b647-932acdc3cdc7 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.673149] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Got total of 5 instances {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2332.219773] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2332.219984] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Cleaning up deleted instances with incomplete migration {{(pid=62346) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 2336.226569] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2337.220342] env[62346]: DEBUG oslo_service.periodic_task [None 
req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2337.220638] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2360.642194] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_power_states {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2360.657950] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Getting list of instances from cluster (obj){ [ 2360.657950] env[62346]: value = "domain-c8" [ 2360.657950] env[62346]: _type = "ClusterComputeResource" [ 2360.657950] env[62346]: } {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2360.659769] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318ae341-cdee-477a-b33c-1a3deb012aa4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.674123] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Got total of 5 instances {{(pid=62346) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2360.674343] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2360.674494] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid fbaf20c0-294c-4e37-b0f4-ee432f00c911 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2360.674678] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 567d2348-be32-4158-a5e0-0a724ca81299 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2360.674814] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 2511c176-53c8-4b4c-99a7-597215dba604 {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2360.674965] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Triggering sync for uuid 1f945968-d3bb-4895-b084-373e02684bea {{(pid=62346) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2360.675350] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.675603] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.675809] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "567d2348-be32-4158-a5e0-0a724ca81299" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.676048] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "2511c176-53c8-4b4c-99a7-597215dba604" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.676264] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "1f945968-d3bb-4895-b084-373e02684bea" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2371.114791] env[62346]: WARNING oslo_vmware.rw_handles [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2371.114791] env[62346]: ERROR oslo_vmware.rw_handles [ 2371.115462] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2371.117422] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 
tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2371.117749] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Copying Virtual Disk [datastore2] vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/ded82669-bd68-45d6-859b-f01b007afb2e/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2371.118074] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85674900-568f-4e10-ac26-5a7a42a2bcff {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.126656] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Waiting for the task: (returnval){ [ 2371.126656] env[62346]: value = "task-4891818" [ 2371.126656] env[62346]: _type = "Task" [ 2371.126656] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.135360] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Task: {'id': task-4891818, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2371.637516] env[62346]: DEBUG oslo_vmware.exceptions [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2371.637845] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2371.638382] env[62346]: ERROR nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2371.638382] env[62346]: Faults: ['InvalidArgument'] [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Traceback (most recent call last): [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] yield resources [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self.driver.spawn(context, instance, image_meta, [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self._fetch_image_if_missing(context, vi) [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] image_cache(vi, tmp_image_ds_loc) [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] vm_util.copy_virtual_disk( [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] session._wait_for_task(vmdk_copy_task) [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] return self.wait_for_task(task_ref) [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] return evt.wait() [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] result = hub.switch() [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] return self.greenlet.switch() [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self.f(*self.args, **self.kw) [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] raise exceptions.translate_fault(task_info.error) [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Faults: ['InvalidArgument'] [ 2371.638382] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] [ 2371.639380] env[62346]: INFO nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Terminating instance [ 2371.640321] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2371.640558] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2371.640814] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b3be7d8-886b-4d89-aeae-bd446199a16c {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.644199] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2371.644412] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2371.645171] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd42103f-e843-40bf-9aea-4b4ff24ad3fa {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.652265] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2371.652489] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b40add9f-2a90-42ac-9a54-2dc1b7e78f40 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.654798] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2371.654974] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2371.655968] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11ace06e-142e-4ac7-80ac-2084f811a4b2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.661397] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 2371.661397] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52b27abc-0bc1-ff8c-c8f1-1b08d42f71e3" [ 2371.661397] env[62346]: _type = "Task" [ 2371.661397] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.669624] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52b27abc-0bc1-ff8c-c8f1-1b08d42f71e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2371.731837] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2371.732094] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2371.732284] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Deleting the datastore file [datastore2] f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2371.732565] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c866cf5e-e598-4391-85ef-24747a719414 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.739507] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Waiting for the task: (returnval){ [ 2371.739507] env[62346]: value = "task-4891820" [ 2371.739507] env[62346]: _type = "Task" [ 2371.739507] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.747560] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Task: {'id': task-4891820, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.171897] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2372.172290] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating directory with path [datastore2] vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2372.172429] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08aa69fd-4b9b-4e2f-8785-454652cd6b5e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.196486] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Created directory with path [datastore2] vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2372.196682] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Fetch image to [datastore2] vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2372.196865] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2372.197655] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4372084-6e0e-446a-aaee-c57fb3b0ab23 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.204695] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d855cb-082c-4c8b-af0d-d46d332e23ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.214974] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671fdede-0039-4217-a048-875cd808d7a6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.248253] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cb7c362f-acf0-40fd-a288-37b093720cc0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.256745] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b3b48898-f96b-4399-965e-6a88e0815f06 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.258512] env[62346]: DEBUG oslo_vmware.api [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Task: {'id': task-4891820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069268} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2372.258817] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2372.259014] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2372.259212] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2372.259419] env[62346]: INFO nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Took 0.62 seconds to destroy the instance on the hypervisor. 
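The traceback above is the generic oslo_vmware task-failure path: CopyVirtualDisk_Task completes with a VIM fault, _poll_task translates it, and wait_for_task re-raises it as VimFaultException. A minimal sketch of how a caller sees that fault (module and attribute names follow the oslo_vmware API visible in the traceback; the wrapper function itself is illustrative, not Nova's code):

from oslo_vmware import exceptions

def copy_disk_and_wait(session, vmdk_copy_task):
    try:
        return session.wait_for_task(vmdk_copy_task)
    except exceptions.VimFaultException as exc:
        # For the failure above: exc.fault_list == ['InvalidArgument'] and
        # str(exc) carries "A specified parameter was not correct: fileType".
        if 'InvalidArgument' in exc.fault_list:
            raise  # bubbles up to _build_and_run_instance, which re-schedules
        raise

This matches the sequence logged here: the spawn aborts, the claim is released, and the compute manager re-schedules the build rather than retrying the disk copy in place.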
[ 2372.262025] env[62346]: DEBUG nova.compute.claims [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2372.262225] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2372.262442] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.284403] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2372.414165] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294b9852-3a67-468f-92b5-91e311cb6a75 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.423493] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9999aef-8f73-4803-8cbe-91950cfbac30 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.457976] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1eae9a-fbfc-44fa-91e7-8923ee219efc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.466181] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3224656d-50e7-46b0-88ea-4f1a89207ff8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.482131] env[62346]: DEBUG nova.compute.provider_tree [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2372.488420] env[62346]: DEBUG oslo_vmware.rw_handles [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2372.543702] env[62346]: DEBUG nova.scheduler.client.report [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2372.549354] env[62346]: DEBUG oslo_vmware.rw_handles [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2372.549536] env[62346]: DEBUG oslo_vmware.rw_handles [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2372.559520] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.297s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2372.560166] env[62346]: ERROR nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2372.560166] env[62346]: Faults: ['InvalidArgument'] [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Traceback (most recent call last): [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self.driver.spawn(context, instance, image_meta, [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self._fetch_image_if_missing(context, vi) [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] image_cache(vi, tmp_image_ds_loc) [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] vm_util.copy_virtual_disk( [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] session._wait_for_task(vmdk_copy_task) [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] return self.wait_for_task(task_ref) [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] return evt.wait() [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] result = hub.switch() [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] return self.greenlet.switch() [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] self.f(*self.args, **self.kw) [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] raise exceptions.translate_fault(task_info.error) [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2372.560166] env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Faults: ['InvalidArgument'] [ 2372.560166] 
env[62346]: ERROR nova.compute.manager [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] [ 2372.560985] env[62346]: DEBUG nova.compute.utils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2372.562826] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Build of instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 was re-scheduled: A specified parameter was not correct: fileType [ 2372.562826] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2372.563234] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2372.563409] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2372.563579] env[62346]: DEBUG nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2372.563743] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2373.002892] env[62346]: DEBUG nova.network.neutron [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.014942] env[62346]: INFO nova.compute.manager [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Took 0.45 seconds to deallocate network for instance. 
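The recurring "Acquiring lock … / acquired … waited / released … held" triplets throughout this log are emitted by oslo_concurrency.lockutils around named semaphores such as "compute_resources". A minimal sketch of the pattern that produces them (the function body is a placeholder, not Nova's resource tracker):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    # Entry and exit are logged at DEBUG exactly as seen above:
    #   Lock "compute_resources" acquired by "..." :: waited 0.000s
    #   Lock "compute_resources" "released" by "..." :: held 0.297s
    pass

The "waited"/"held" durations in those lines are the contention and hold times for the named lock, which is why the 527.764s hold on the instance lock below stands out.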
[ 2373.153021] env[62346]: INFO nova.scheduler.client.report [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Deleted allocations for instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 [ 2373.177859] env[62346]: DEBUG oslo_concurrency.lockutils [None req-68393f85-ccb7-48e7-970d-e1c155852a7a tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 527.764s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.178197] env[62346]: DEBUG oslo_concurrency.lockutils [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 332.368s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.178363] env[62346]: DEBUG oslo_concurrency.lockutils [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Acquiring lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.178593] env[62346]: DEBUG oslo_concurrency.lockutils [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.178768] env[62346]: DEBUG oslo_concurrency.lockutils [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.180779] env[62346]: INFO nova.compute.manager [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Terminating instance [ 2373.184570] env[62346]: DEBUG nova.compute.manager [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2373.184758] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2373.185040] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3367e1bb-af3e-41c0-b400-884369eae2a0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.194118] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a850a5-1be3-48a4-9109-8c089bbbe916 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.224126] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f72ad8bf-f599-4e5d-8ccc-2f7de9becb89 could not be found. [ 2373.224354] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2373.224537] env[62346]: INFO nova.compute.manager [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2373.224785] env[62346]: DEBUG oslo.service.loopingcall [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2373.225025] env[62346]: DEBUG nova.compute.manager [-] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2373.225125] env[62346]: DEBUG nova.network.neutron [-] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2373.258950] env[62346]: DEBUG nova.network.neutron [-] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.266839] env[62346]: INFO nova.compute.manager [-] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] Took 0.04 seconds to deallocate network for instance. 
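The "Waiting for function … _deallocate_network_with_retries to return" line above is oslo_service.loopingcall driving a retried callback until it signals completion. A minimal sketch of that looping-call pattern (the fixed-interval variant and the callback are illustrative; Nova's actual retry variant and backoff policy may differ):

from oslo_service import loopingcall

def _deallocate_once():
    # One attempt; raising LoopingCallDone stops the loop and unblocks wait().
    raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_once)
result = timer.start(interval=1.0).wait()  # blocks until LoopingCallDone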
[ 2373.369655] env[62346]: DEBUG oslo_concurrency.lockutils [None req-56997e92-064e-4466-b2c2-961bc75267be tempest-ServerActionsTestJSON-509377772 tempest-ServerActionsTestJSON-509377772-project-member] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.370540] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.695s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.370722] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: f72ad8bf-f599-4e5d-8ccc-2f7de9becb89] During sync_power_state the instance has a pending task (deleting). Skip. [ 2373.370899] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "f72ad8bf-f599-4e5d-8ccc-2f7de9becb89" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.220556] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2379.232859] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.232859] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.232859] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.233121] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2379.234296] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c05229-b4d2-4c5b-99dc-d4cb70a01f3d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.243523] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1e6ddeaf-4480-4773-b898-f9075fecc9ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.258985] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94532c21-4720-40b3-91a8-ae41f530e876 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.266209] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0659e8e0-cba3-40aa-92e0-f04a24246cf1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.295843] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180578MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2379.296015] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.296284] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.390840] env[62346]: DEBUG oslo_concurrency.lockutils [None req-8d0a7271-9478-4387-94d4-be1abe67076f tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquiring lock "2511c176-53c8-4b4c-99a7-597215dba604" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.440296] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2379.440668] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 567d2348-be32-4158-a5e0-0a724ca81299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2379.440944] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2511c176-53c8-4b4c-99a7-597215dba604 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2379.441214] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1f945968-d3bb-4895-b084-373e02684bea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2379.441493] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2379.441755] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] stats={'failed_builds': '91', 'num_instances': '4', 'num_vm_building': '4', 'num_task_deleting': '2', 'num_os_type_None': '4', 'num_proj_20adb521b1574b8581a0c368923e38eb': '1', 'io_workload': '4', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'num_task_spawning': '2', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_proj_876110afb8a6448fa8c62675fcfc8180': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2379.460342] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing inventories for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2379.475876] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating ProviderTree inventory for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2379.476051] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Updating inventory in ProviderTree for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2379.488055] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing aggregate associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, aggregates: None {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2379.510874] env[62346]: DEBUG 
nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Refreshing trait associations for resource provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62346) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2379.587881] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9ac25a-dfb1-4510-b079-5a51e7885849 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.595566] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8086f733-8924-46aa-a225-edee9f7e2a1b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.626191] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f4a71f-6233-43d9-b5d2-44a30b5d30c5 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.633437] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0623a935-3918-4a93-b829-1bdbc1ed44b2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.646739] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2379.654960] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2379.669808] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2379.670039] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.374s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2381.664942] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2381.665365] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2381.665365] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2381.665498] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2381.681299] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2381.681463] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2381.681636] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2381.681720] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2381.681822] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2384.219658] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.220037] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2385.220163] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2385.220592] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2388.220574] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.220916] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2399.221282] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.411554] env[62346]: WARNING oslo_vmware.rw_handles [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2421.411554] env[62346]: ERROR oslo_vmware.rw_handles [ 2421.412265] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2421.413884] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2421.414336] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 
tempest-AttachVolumeNegativeTest-93734980-project-member] Copying Virtual Disk [datastore2] vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/ca96878f-6435-4a09-9a54-eb96c2068c51/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2421.414466] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6fa87ab-b761-40fa-9a78-0092b67e80ea {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.423282] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 2421.423282] env[62346]: value = "task-4891821" [ 2421.423282] env[62346]: _type = "Task" [ 2421.423282] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.431456] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2421.935051] env[62346]: DEBUG oslo_vmware.exceptions [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Fault InvalidArgument not matched. {{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2421.935358] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2421.935915] env[62346]: ERROR nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2421.935915] env[62346]: Faults: ['InvalidArgument'] [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Traceback (most recent call last): [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] yield resources [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self.driver.spawn(context, instance, image_meta, [ 2421.935915] 
env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self._fetch_image_if_missing(context, vi) [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] image_cache(vi, tmp_image_ds_loc) [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] vm_util.copy_virtual_disk( [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] session._wait_for_task(vmdk_copy_task) [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] return self.wait_for_task(task_ref) [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] return evt.wait() [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] result = hub.switch() [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] return self.greenlet.switch() [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self.f(*self.args, **self.kw) [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2421.935915] env[62346]: ERROR nova.compute.manager 
[instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] raise exceptions.translate_fault(task_info.error) [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Faults: ['InvalidArgument'] [ 2421.935915] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] [ 2421.936935] env[62346]: INFO nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Terminating instance [ 2421.937916] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2421.938136] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2421.938378] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c20e18f0-c712-4d5e-a3f7-0598fd845c9a {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.941862] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2421.942083] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2421.942864] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860374f5-4a9e-4259-81ef-c1b3504d3bad {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.950715] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2421.951772] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b55adebc-8a85-433b-9238-fdf7293f5c0b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.953299] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2421.953506] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2421.954252] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca2a1fdd-3228-4c63-8f01-1a6bf84cb6ce {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.960223] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 2421.960223] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d51605-30ab-114f-e125-c394e28d414b" [ 2421.960223] env[62346]: _type = "Task" [ 2421.960223] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.972508] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52d51605-30ab-114f-e125-c394e28d414b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.022618] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2422.022864] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2422.022981] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleting the datastore file [datastore2] fbaf20c0-294c-4e37-b0f4-ee432f00c911 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2422.023315] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3151d2d9-5d38-4db6-bcaf-78e3ee4720ac {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.030578] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for the task: (returnval){ [ 2422.030578] env[62346]: value = "task-4891823" [ 2422.030578] env[62346]: _type = "Task" [ 2422.030578] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2422.039299] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891823, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.470413] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2422.470701] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating directory with path [datastore2] vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2422.470913] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3761027a-9f65-4a39-a6f9-832dbc35b7e4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.483219] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Created directory with path [datastore2] vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2422.483427] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Fetch image to [datastore2] vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2422.483598] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2422.484379] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154f5ac8-415b-438e-bfb4-44a94b7d42c2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.491767] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45fb98e-4e34-473e-bf72-6b4e533951e6 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.501392] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb37d6a-0843-4013-8cfe-f2163e4a7e36 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.536990] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69030373-1334-4096-958f-5750e44e4882 
{{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.544429] env[62346]: DEBUG oslo_vmware.api [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Task: {'id': task-4891823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076642} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.545910] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2422.546116] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2422.546294] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2422.546466] env[62346]: INFO nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Took 0.60 seconds to destroy the instance on the hypervisor. 
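The cycle above is the one this stretch of the log keeps repeating: VirtualDiskManager.CopyVirtualDisk_Task rejects the copy spec for the cached image with fault InvalidArgument ("A specified parameter was not correct: fileType"), _fetch_image_if_missing therefore never produces the cached .vmdk, and the compute manager unregisters the half-built VM, deletes its datastore directory, and aborts the resource claim before re-scheduling. The sketch below illustrates the poll-and-translate loop behind the "progress is 0%" and "Fault InvalidArgument not matched" lines; it is a simplified stand-in, not the real oslo.vmware code (the actual loop lives in oslo_vmware.api._poll_task plus exceptions.translate_fault), and session.get_task_info and the info fields are assumed helpers.

    import time

    class VimFault(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list          # e.g. ['InvalidArgument']

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll the vCenter task until it leaves the 'running' state.
        while True:
            info = session.get_task_info(task_ref)    # assumed helper
            if info.state == 'running':
                print(f"Task {task_ref} ({info.name}) progress is {info.progress}%")
                time.sleep(poll_interval)
            elif info.state == 'success':
                return info.result
            else:
                # No specific fault class is registered for 'InvalidArgument'
                # ("Fault InvalidArgument not matched" above), so a generic
                # exception carrying the fault list is raised instead.
                raise VimFault(info.error.faults, info.error.msg)

Because the spawner holds the per-image lock on devstack-image-cache_base while this runs, the fault also determines when the next waiter (here req-c756169e) acquires the lock and retries the same copy.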
[ 2422.548313] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7707f784-44cb-4aaa-a23c-01144c027148 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.550218] env[62346]: DEBUG nova.compute.claims [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2422.550390] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2422.550598] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2422.574096] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2422.649147] env[62346]: DEBUG oslo_vmware.rw_handles [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2422.710747] env[62346]: DEBUG oslo_vmware.rw_handles [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2422.710944] env[62346]: DEBUG oslo_vmware.rw_handles [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2422.732393] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8011bdc8-7bfe-4f7c-9b05-d8e9d6b9c11e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.740531] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6666584-db0c-4da7-81d4-ea8db622bbfc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.771391] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5eadb0-8d31-4950-ac94-f25cc7dc7d12 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.779257] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f828ff-a0f4-4a2e-a78e-25fa260a66fc {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.792719] env[62346]: DEBUG nova.compute.provider_tree [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2422.802893] env[62346]: DEBUG nova.scheduler.client.report [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2422.816973] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.266s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2422.817522] env[62346]: ERROR nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2422.817522] env[62346]: Faults: ['InvalidArgument'] [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Traceback (most recent call last): [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2422.817522] env[62346]: 
ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self.driver.spawn(context, instance, image_meta, [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self._fetch_image_if_missing(context, vi) [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] image_cache(vi, tmp_image_ds_loc) [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] vm_util.copy_virtual_disk( [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] session._wait_for_task(vmdk_copy_task) [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] return self.wait_for_task(task_ref) [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] return evt.wait() [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] result = hub.switch() [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] return self.greenlet.switch() [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] self.f(*self.args, **self.kw) [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] raise exceptions.translate_fault(task_info.error) [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Faults: ['InvalidArgument'] [ 2422.817522] env[62346]: ERROR nova.compute.manager [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] [ 2422.818449] env[62346]: DEBUG nova.compute.utils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2422.819685] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Build of instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 was re-scheduled: A specified parameter was not correct: fileType [ 2422.819685] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2422.820075] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2422.820252] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2422.820422] env[62346]: DEBUG nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2422.820597] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2423.186048] env[62346]: DEBUG nova.network.neutron [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2423.199469] env[62346]: INFO nova.compute.manager [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Took 0.38 seconds to deallocate network for instance. [ 2423.310404] env[62346]: INFO nova.scheduler.client.report [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Deleted allocations for instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 [ 2423.336411] env[62346]: DEBUG oslo_concurrency.lockutils [None req-07894c2f-6acf-456e-8815-79bf9146395c tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 536.118s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2423.336726] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 340.960s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2423.336995] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Acquiring lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2423.337267] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2423.337508] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2423.339975] env[62346]: INFO nova.compute.manager [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Terminating instance [ 2423.341909] env[62346]: DEBUG nova.compute.manager [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2423.342143] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2423.342703] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-860ff7ea-a452-4852-9d39-92f9d0ef3623 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.355257] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cccb9ca-3cc2-4169-8156-195408cf5a42 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2423.383998] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fbaf20c0-294c-4e37-b0f4-ee432f00c911 could not be found. [ 2423.384246] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2423.384604] env[62346]: INFO nova.compute.manager [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2423.384700] env[62346]: DEBUG oslo.service.loopingcall [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2423.384914] env[62346]: DEBUG nova.compute.manager [-] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2423.385019] env[62346]: DEBUG nova.network.neutron [-] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2423.431579] env[62346]: DEBUG nova.network.neutron [-] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2423.439986] env[62346]: INFO nova.compute.manager [-] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] Took 0.05 seconds to deallocate network for instance. [ 2423.529797] env[62346]: DEBUG oslo_concurrency.lockutils [None req-fb1386e8-640b-49ac-a5d7-b301fa254836 tempest-AttachVolumeNegativeTest-93734980 tempest-AttachVolumeNegativeTest-93734980-project-member] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.193s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2423.531355] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 62.855s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2423.531355] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: fbaf20c0-294c-4e37-b0f4-ee432f00c911] During sync_power_state the instance has a pending task (deleting). Skip.
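The terminate path just logged is serialized by three oslo.concurrency locks: the per-instance lock "fbaf20c0-..." (held 0.193s), the narrower "fbaf20c0-...-events" lock around clear_events_for_instance, and "compute_resources" inside the resource tracker; the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" DEBUG pairs are emitted by lockutils itself. A minimal sketch of that usage pattern, with placeholder bodies and lock names mirroring the log:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Placeholder body; the synchronized decorator logs the
        # "acquired ... waited" / "released ... held" pairs seen above.
        pass

    def do_terminate_instance(instance_uuid):
        # Per-instance lock first, then the "<uuid>-events" lock,
        # matching the ordering recorded in this log.
        with lockutils.lock(instance_uuid):
            with lockutils.lock(f'{instance_uuid}-events'):
                pass  # clear_events_for_instance would run here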
[ 2423.531506] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "fbaf20c0-294c-4e37-b0f4-ee432f00c911" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2439.220209] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2439.232418] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2439.232653] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2439.232822] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2439.233068] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62346) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2439.234321] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f02b7e8-2166-4202-a85e-aaa79a729ea8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.243944] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e780f5-2d1c-4e31-989d-02904189f7f0 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.259159] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fecb62-4e47-4d6b-8cf3-6fae520c2553 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.266255] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47577524-4ed8-4571-bfdf-684b35eb3203 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.296836] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180580MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62346) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2439.297030] env[62346]: DEBUG
oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2439.297234] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2439.356113] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 567d2348-be32-4158-a5e0-0a724ca81299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2439.356318] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 2511c176-53c8-4b4c-99a7-597215dba604 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2439.356453] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Instance 1f945968-d3bb-4895-b084-373e02684bea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62346) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2439.356641] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2439.356850] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=100GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] stats={'failed_builds': '92', 'num_instances': '3', 'num_vm_building': '3', 'num_task_deleting': '2', 'num_os_type_None': '3', 'num_proj_abc1ead3f9a9442ca0b85f152f94fe6b': '1', 'io_workload': '3', 'num_proj_5afa33f3f2b94e68a5161002a9718f78': '1', 'num_task_spawning': '1', 'num_proj_876110afb8a6448fa8c62675fcfc8180': '1'} {{(pid=62346) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2439.410822] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f563060-6722-4ff3-b001-cd260fd2c42f {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.418917] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623f426d-e6f7-4e1f-9e6d-28fb629a7bd3 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.448915] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d3334a5e-8440-43c3-aadc-c1b5d0d6a877 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.456699] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde927ee-75fd-4b83-88d9-af6c2436ce68 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.471770] env[62346]: DEBUG nova.compute.provider_tree [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2439.480731] env[62346]: DEBUG nova.scheduler.client.report [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2439.494731] env[62346]: DEBUG nova.compute.resource_tracker [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62346) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2439.494937] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.198s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2440.396232] env[62346]: DEBUG oslo_concurrency.lockutils [None req-27802b91-fdb5-4d70-bb31-cd054df8aa04 tempest-AttachVolumeTestJSON-2044793512 tempest-AttachVolumeTestJSON-2044793512-project-member] Acquiring lock "1f945968-d3bb-4895-b084-373e02684bea" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2442.490292] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.490645] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.490817] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Starting heal instance info cache {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2442.490900] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Rebuilding the list of instances to heal {{(pid=62346) _heal_instance_info_cache
/opt/stack/nova/nova/compute/manager.py:10011}} [ 2442.503433] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2442.503594] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2442.503716] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 1f945968-d3bb-4895-b084-373e02684bea] Skipping network cache update for instance because it is Building. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2442.503847] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Didn't find any instances for network info cache update. {{(pid=62346) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2445.220747] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.219612] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.219848] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.220015] env[62346]: DEBUG nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62346) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2449.221167] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2450.220988] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2459.215686] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2460.220623] env[62346]: DEBUG oslo_service.periodic_task [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62346) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2471.426445] env[62346]: WARNING oslo_vmware.rw_handles [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles response.begin() [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2471.426445] env[62346]: ERROR oslo_vmware.rw_handles [ 2471.427221] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Downloaded image file data 9feb52a6-5366-4257-bc23-471887ce1370 to vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2471.428925] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 
567d2348-be32-4158-a5e0-0a724ca81299] Caching image {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2471.429210] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Copying Virtual Disk [datastore2] vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk to [datastore2] vmware_temp/11b77d77-2f25-426e-afb2-521814236477/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk {{(pid=62346) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2471.429503] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6eae716e-b64d-42e7-bbb1-2ebfb3742f6d {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.437879] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 2471.437879] env[62346]: value = "task-4891824" [ 2471.437879] env[62346]: _type = "Task" [ 2471.437879] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2471.446019] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891824, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2471.948546] env[62346]: DEBUG oslo_vmware.exceptions [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Fault InvalidArgument not matched. 
{{(pid=62346) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2471.948913] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2471.949480] env[62346]: ERROR nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2471.949480] env[62346]: Faults: ['InvalidArgument'] [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Traceback (most recent call last): [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] yield resources [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self.driver.spawn(context, instance, image_meta, [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self._fetch_image_if_missing(context, vi) [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] image_cache(vi, tmp_image_ds_loc) [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] vm_util.copy_virtual_disk( [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] session._wait_for_task(vmdk_copy_task) [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] return self.wait_for_task(task_ref) [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] return evt.wait() [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] result = hub.switch() [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] return self.greenlet.switch() [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self.f(*self.args, **self.kw) [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] raise exceptions.translate_fault(task_info.error) [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Faults: ['InvalidArgument'] [ 2471.949480] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] [ 2471.951232] env[62346]: INFO nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Terminating instance [ 2471.951453] env[62346]: DEBUG oslo_concurrency.lockutils [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2471.951610] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2471.951861] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c812ee20-827c-47c6-9070-8a404e17d5d8 {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.954306] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Start destroying the instance on the hypervisor. {{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2471.954503] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2471.955358] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676145e8-293b-4a82-8097-3584f73b0555 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.963284] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Unregistering the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2471.964381] env[62346]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cfc83ea-6c8a-4c15-a038-050298b5fa86 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.965942] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2471.966133] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62346) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2471.966797] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3df8162-4323-4f21-8fe5-501e22bc3c8c {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.972381] env[62346]: DEBUG oslo_vmware.api [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Waiting for the task: (returnval){ [ 2471.972381] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5229d193-858c-c739-5ee9-a28f94220be6" [ 2471.972381] env[62346]: _type = "Task" [ 2471.972381] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2471.980698] env[62346]: DEBUG oslo_vmware.api [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]5229d193-858c-c739-5ee9-a28f94220be6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2472.046208] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Unregistered the VM {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2472.046451] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Deleting contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2472.046636] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleting the datastore file [datastore2] 567d2348-be32-4158-a5e0-0a724ca81299 {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2472.046947] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e301c3ad-bc33-45dd-ba16-628344d88f2e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.053400] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 2472.053400] env[62346]: value = "task-4891826" [ 2472.053400] env[62346]: _type = "Task" [ 2472.053400] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2472.061833] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891826, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2472.483932] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Preparing fetch location {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2472.484332] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating directory with path [datastore2] vmware_temp/2eb1552d-45ae-4c3a-a588-7fb181e16ea2/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2472.484524] env[62346]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ffa5857-4282-43a2-a333-d0d1e55a992e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.497389] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Created directory with path [datastore2] vmware_temp/2eb1552d-45ae-4c3a-a588-7fb181e16ea2/9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2472.497541] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Fetch image to [datastore2] vmware_temp/2eb1552d-45ae-4c3a-a588-7fb181e16ea2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2472.497692] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to [datastore2] vmware_temp/2eb1552d-45ae-4c3a-a588-7fb181e16ea2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk on the data store datastore2 {{(pid=62346) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2472.498513] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9244834f-f580-4fcf-9192-2b9ebb5c21ef {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.506149] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3843f20f-6d48-4ff1-b0b3-1e5ab36ec664 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.515857] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0aceca4-da80-48be-804d-8db297d75159 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.547705] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e1f8fd-51db-41f8-98b3-44989b443c2c {{(pid=62346) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.557203] env[62346]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8f50a101-dc94-4154-92ec-8ad24494f6d9 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.563974] env[62346]: DEBUG oslo_vmware.api [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': task-4891826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067443} completed successfully. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2472.564248] env[62346]: DEBUG nova.virt.vmwareapi.ds_util [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted the datastore file {{(pid=62346) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2472.564433] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Deleted contents of the VM from datastore datastore2 {{(pid=62346) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2472.564606] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2472.564834] env[62346]: INFO nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Took 0.61 seconds to destroy the instance on the hypervisor. 
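The spawn failure above follows oslo_vmware's task pattern: Nova submits CopyVirtualDisk_Task, wait_for_task polls it, and _poll_task raises the vCenter fault via exceptions.translate_fault, which surfaces as VimFaultException with fault_list ['InvalidArgument'] ("A specified parameter was not correct: fileType"). A minimal sketch of that call path, assuming a generic oslo_vmware VMwareAPISession named `session` and illustrative datastore paths (these names are placeholders, not Nova's exact objects):

    from oslo_vmware import exceptions as vexc

    def copy_virtual_disk(session, source_path, dest_path):
        # Submit the copy through the vSphere VirtualDiskManager, the same
        # SOAP call the traceback above goes through.
        vim = session.vim
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName=source_path, destName=dest_path)
        try:
            # wait_for_task polls the task object; on an error state it
            # raises the translated fault (see _poll_task in the traceback).
            return session.wait_for_task(task)
        except vexc.VimFaultException as exc:
            # In this log exc.fault_list == ['InvalidArgument']; the caller
            # (_build_and_run_instance) treats it as a build failure and
            # re-schedules the instance.
            raise

Nova itself cleans up before re-raising, which is exactly the UnregisterVM / DeleteDatastoreFile_Task sequence logged above.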
[ 2472.566960] env[62346]: DEBUG nova.compute.claims [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Aborting claim: {{(pid=62346) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2472.567171] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2472.567715] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2472.581019] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2472.581260] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2472.586158] env[62346]: DEBUG nova.virt.vmwareapi.images [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] [instance: 2511c176-53c8-4b4c-99a7-597215dba604] Downloading image file data 9feb52a6-5366-4257-bc23-471887ce1370 to the data store datastore2 {{(pid=62346) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2472.596344] env[62346]: DEBUG nova.compute.manager [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Starting instance... {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2472.639598] env[62346]: DEBUG oslo_vmware.rw_handles [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2eb1552d-45ae-4c3a-a588-7fb181e16ea2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62346) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2472.700281] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2472.702657] env[62346]: DEBUG oslo_vmware.rw_handles [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Completed reading data from the image iterator. {{(pid=62346) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2472.702834] env[62346]: DEBUG oslo_vmware.rw_handles [None req-a0ba8680-76ff-4ec9-bceb-11f2be7cf901 tempest-ServersTestJSON-606722541 tempest-ServersTestJSON-606722541-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2eb1552d-45ae-4c3a-a588-7fb181e16ea2/9feb52a6-5366-4257-bc23-471887ce1370/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62346) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2472.738398] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7deff5b4-7218-4490-9908-2924c92888f2 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.748796] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d098525-e10d-431c-afc3-ff3e1c2fbe4b {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.779932] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72eece32-8cca-4ae2-a896-0b650c5fc104 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.787929] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbf22ed-0786-4594-8f81-65422943cb26 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.802142] env[62346]: DEBUG nova.compute.provider_tree [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2472.811448] env[62346]: DEBUG nova.scheduler.client.report [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2472.825682] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.258s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2472.826243] env[62346]: ERROR nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2472.826243] env[62346]: Faults: ['InvalidArgument'] [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Traceback (most recent call last): [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self.driver.spawn(context, instance, image_meta, [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self._fetch_image_if_missing(context, vi) [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] image_cache(vi, tmp_image_ds_loc) [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] vm_util.copy_virtual_disk( [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] session._wait_for_task(vmdk_copy_task) [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] return self.wait_for_task(task_ref) [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, 
in wait_for_task [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] return evt.wait() [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] result = hub.switch() [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] return self.greenlet.switch() [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] self.f(*self.args, **self.kw) [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] raise exceptions.translate_fault(task_info.error) [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Faults: ['InvalidArgument'] [ 2472.826243] env[62346]: ERROR nova.compute.manager [instance: 567d2348-be32-4158-a5e0-0a724ca81299] [ 2472.827179] env[62346]: DEBUG nova.compute.utils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] VimFaultException {{(pid=62346) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2472.828156] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.128s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2472.829584] env[62346]: INFO nova.compute.claims [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2472.832244] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Build of instance 567d2348-be32-4158-a5e0-0a724ca81299 was re-scheduled: A specified parameter was not correct: fileType [ 2472.832244] env[62346]: Faults: ['InvalidArgument'] {{(pid=62346) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2472.832642] 
env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Unplugging VIFs for instance {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2472.832813] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62346) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2472.832984] env[62346]: DEBUG nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2472.833163] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2472.944081] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77306edf-bec2-4d4b-8030-1a8332e5adf4 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.953269] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4661515-b0a6-48a8-8b2c-9493e36e0760 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.987346] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a6d6cb-9193-4825-a696-f44dcae82025 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.995273] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42853f8-566d-4aaf-89b9-254a66ea3fda {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.011323] env[62346]: DEBUG nova.compute.provider_tree [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed in ProviderTree for provider: 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c {{(pid=62346) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2473.020828] env[62346]: DEBUG nova.scheduler.client.report [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Inventory has not changed for provider 50caa86a-fe85-4e00-831f-9ba6f7fe3d1c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit':
96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62346) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2473.035887] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.207s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2473.035887] env[62346]: DEBUG nova.compute.manager [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Start building networks asynchronously for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2473.074335] env[62346]: DEBUG nova.compute.utils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Using /dev/sd instead of None {{(pid=62346) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2473.076501] env[62346]: DEBUG nova.compute.manager [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Allocating IP information in the background. {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2473.076681] env[62346]: DEBUG nova.network.neutron [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] allocate_for_instance() {{(pid=62346) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2473.088365] env[62346]: DEBUG nova.compute.manager [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Start building block device mappings for instance. {{(pid=62346) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2473.168502] env[62346]: DEBUG nova.policy [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c601083f0a44da850b33189c701bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abc1ead3f9a9442ca0b85f152f94fe6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62346) authorize /opt/stack/nova/nova/policy.py:203}} [ 2473.189602] env[62346]: DEBUG nova.compute.manager [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Start spawning the instance on the hypervisor. 
{{(pid=62346) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2473.218606] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T11:25:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T11:25:33Z,direct_url=,disk_format='vmdk',id=9feb52a6-5366-4257-bc23-471887ce1370,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='663b4a65cd3440018494db77614ee169',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T11:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2473.218842] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2473.218995] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image limits 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2473.219200] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Flavor pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2473.219351] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Image pref 0:0:0 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2473.219775] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62346) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2473.219775] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2473.219992] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2473.220069] env[62346]: DEBUG nova.virt.hardware [None 
req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Got 1 possible topologies {{(pid=62346) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2473.220223] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2473.220396] env[62346]: DEBUG nova.virt.hardware [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62346) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2473.221276] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301029ab-421e-4b41-8a96-186625a2eee8 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.230264] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97f4b33-023d-4999-b3ba-a63a4e01ae64 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.285057] env[62346]: DEBUG nova.network.neutron [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2473.305098] env[62346]: INFO nova.compute.manager [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Took 0.47 seconds to deallocate network for instance. 
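The topology lines above reduce to simple arithmetic: with vcpus=1 and no flavor or image limits (sockets, cores and threads each effectively capped at 65536), the only factorization of 1 is sockets=1, cores=1, threads=1. A toy enumerator illustrating the same selection; this is an illustration of the arithmetic, not nova.virt.hardware itself:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is the
        # vCPU count, within the given limits -- with vcpus=1 the only
        # solution is (1, 1, 1), matching "Got 1 possible topologies".
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [(1, 1, 1)]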
[ 2473.413787] env[62346]: INFO nova.scheduler.client.report [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Deleted allocations for instance 567d2348-be32-4158-a5e0-0a724ca81299 [ 2473.440168] env[62346]: DEBUG oslo_concurrency.lockutils [None req-c756169e-a5b7-4a81-8c6b-9be53d47cfb9 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "567d2348-be32-4158-a5e0-0a724ca81299" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 393.136s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2473.442324] env[62346]: DEBUG oslo_concurrency.lockutils [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "567d2348-be32-4158-a5e0-0a724ca81299" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 197.862s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2473.442324] env[62346]: DEBUG oslo_concurrency.lockutils [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "567d2348-be32-4158-a5e0-0a724ca81299-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2473.442324] env[62346]: DEBUG oslo_concurrency.lockutils [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "567d2348-be32-4158-a5e0-0a724ca81299-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2473.442324] env[62346]: DEBUG oslo_concurrency.lockutils [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "567d2348-be32-4158-a5e0-0a724ca81299-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2473.443352] env[62346]: INFO nova.compute.manager [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Terminating instance [ 2473.446805] env[62346]: DEBUG nova.compute.manager [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Start destroying the instance on the hypervisor. 
{{(pid=62346) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2473.447011] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Destroying instance {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2473.447343] env[62346]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a26e242f-733b-401f-919b-e175534cfce1 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.457520] env[62346]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29ba013-cf4d-4336-bc42-105bb826ea7e {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.485832] env[62346]: WARNING nova.virt.vmwareapi.vmops [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 567d2348-be32-4158-a5e0-0a724ca81299 could not be found. [ 2473.486131] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Instance destroyed {{(pid=62346) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2473.486279] env[62346]: INFO nova.compute.manager [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2473.486819] env[62346]: DEBUG oslo.service.loopingcall [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2473.486819] env[62346]: DEBUG nova.compute.manager [-] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Deallocating network for instance {{(pid=62346) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2473.486819] env[62346]: DEBUG nova.network.neutron [-] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] deallocate_for_instance() {{(pid=62346) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2473.519339] env[62346]: DEBUG nova.network.neutron [-] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Updating instance_info_cache with network_info: [] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2473.528219] env[62346]: INFO nova.compute.manager [-] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] Took 0.04 seconds to deallocate network for instance. 
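The "Waiting for function ... _deallocate_network_with_retries to return" line above comes from oslo.service's looping-call machinery. Whichever looping-call variant Nova wires in there, the underlying primitive looks like the following minimal sketch (the `_poll` callable is an invented stand-in for the retried Neutron deallocation; assumes oslo.service is installed):

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _poll():
        # Stand-in for the retried work; the real function talks to
        # Neutron. Completion is signalled by raising LoopingCallDone.
        attempts['count'] += 1
        if attempts['count'] >= 3:  # pretend the third try succeeds
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
    print(result)  # True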
[ 2473.609804] env[62346]: DEBUG nova.network.neutron [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Successfully created port: 7a4a4bdb-824a-4f83-8f5a-221146d711b1 {{(pid=62346) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2473.627853] env[62346]: DEBUG oslo_concurrency.lockutils [None req-770c185d-78e7-4c31-8109-8c1d3ff60eb2 tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Lock "567d2348-be32-4158-a5e0-0a724ca81299" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2473.628754] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "567d2348-be32-4158-a5e0-0a724ca81299" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 112.953s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2473.629891] env[62346]: INFO nova.compute.manager [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] [instance: 567d2348-be32-4158-a5e0-0a724ca81299] During sync_power_state the instance has a pending task (deleting). Skip. [ 2473.629891] env[62346]: DEBUG oslo_concurrency.lockutils [None req-17b91105-54b2-4719-a7c6-6b62166bb34e None None] Lock "567d2348-be32-4158-a5e0-0a724ca81299" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2474.522970] env[62346]: DEBUG nova.network.neutron [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Successfully updated port: 7a4a4bdb-824a-4f83-8f5a-221146d711b1 {{(pid=62346) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2474.535567] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "refresh_cache-38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2474.535702] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "refresh_cache-38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2474.535850] env[62346]: DEBUG nova.network.neutron [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Building network info cache for instance {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2474.583435] env[62346]: DEBUG nova.network.neutron [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 
tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Instance cache missing network info. {{(pid=62346) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2474.761611] env[62346]: DEBUG nova.network.neutron [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Updating instance_info_cache with network_info: [{"id": "7a4a4bdb-824a-4f83-8f5a-221146d711b1", "address": "fa:16:3e:2b:2d:2e", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a4a4bdb-82", "ovs_interfaceid": "7a4a4bdb-824a-4f83-8f5a-221146d711b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2474.800624] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "refresh_cache-38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2474.800953] env[62346]: DEBUG nova.compute.manager [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Instance network_info: |[{"id": "7a4a4bdb-824a-4f83-8f5a-221146d711b1", "address": "fa:16:3e:2b:2d:2e", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a4a4bdb-82", "ovs_interfaceid": "7a4a4bdb-824a-4f83-8f5a-221146d711b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62346) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2474.801786] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:2d:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a4a4bdb-824a-4f83-8f5a-221146d711b1', 'vif_model': 'vmxnet3'}] {{(pid=62346) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2474.809935] env[62346]: DEBUG oslo.service.loopingcall [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62346) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2474.810464] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Creating VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2474.810707] env[62346]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fb37925-f9b7-4d09-a42c-215a6ebd1285 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.831758] env[62346]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2474.831758] env[62346]: value = "task-4891827" [ 2474.831758] env[62346]: _type = "Task" [ 2474.831758] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2474.839528] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891827, 'name': CreateVM_Task} progress is 0%. {{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2475.341974] env[62346]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891827, 'name': CreateVM_Task, 'duration_secs': 0.301597} completed successfully. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2475.342885] env[62346]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Created VM on the ESX host {{(pid=62346) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2475.349933] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2475.350153] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2475.350475] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2475.350737] env[62346]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffbb8a50-c398-45ea-bdb4-1843d43c9c81 {{(pid=62346) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.356078] env[62346]: DEBUG oslo_vmware.api [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Waiting for the task: (returnval){ [ 2475.356078] env[62346]: value = "session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52c92240-072c-65c6-1d48-de7762222bcf" [ 2475.356078] env[62346]: _type = "Task" [ 2475.356078] env[62346]: } to complete. {{(pid=62346) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2475.366308] env[62346]: DEBUG oslo_vmware.api [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Task: {'id': session[52dd3b3d-436d-a322-a9ff-c7c841f57bf1]52c92240-072c-65c6-1d48-de7762222bcf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62346) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2475.371530] env[62346]: DEBUG nova.compute.manager [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Received event network-vif-plugged-7a4a4bdb-824a-4f83-8f5a-221146d711b1 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2475.371737] env[62346]: DEBUG oslo_concurrency.lockutils [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] Acquiring lock "38c17fd5-70a5-4615-b4b8-dd8bf7f57e28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2475.371933] env[62346]: DEBUG oslo_concurrency.lockutils [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] Lock "38c17fd5-70a5-4615-b4b8-dd8bf7f57e28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2475.372129] env[62346]: DEBUG oslo_concurrency.lockutils [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] Lock "38c17fd5-70a5-4615-b4b8-dd8bf7f57e28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62346) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2475.372260] env[62346]: DEBUG nova.compute.manager [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] No waiting events found dispatching network-vif-plugged-7a4a4bdb-824a-4f83-8f5a-221146d711b1 {{(pid=62346) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2475.372412] env[62346]: WARNING nova.compute.manager [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Received unexpected event network-vif-plugged-7a4a4bdb-824a-4f83-8f5a-221146d711b1 for instance with vm_state building and task_state spawning. [ 2475.372602] env[62346]: DEBUG nova.compute.manager [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Received event network-changed-7a4a4bdb-824a-4f83-8f5a-221146d711b1 {{(pid=62346) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2475.372700] env[62346]: DEBUG nova.compute.manager [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Refreshing instance network info cache due to event network-changed-7a4a4bdb-824a-4f83-8f5a-221146d711b1. 
[ 2475.372869] env[62346]: DEBUG oslo_concurrency.lockutils [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] Acquiring lock "refresh_cache-38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2475.372999] env[62346]: DEBUG oslo_concurrency.lockutils [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] Acquired lock "refresh_cache-38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2475.373165] env[62346]: DEBUG nova.network.neutron [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Refreshing network info cache for port 7a4a4bdb-824a-4f83-8f5a-221146d711b1 {{(pid=62346) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2475.639347] env[62346]: DEBUG nova.network.neutron [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Updated VIF entry in instance network info cache for port 7a4a4bdb-824a-4f83-8f5a-221146d711b1. {{(pid=62346) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 2475.639742] env[62346]: DEBUG nova.network.neutron [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Updating instance_info_cache with network_info: [{"id": "7a4a4bdb-824a-4f83-8f5a-221146d711b1", "address": "fa:16:3e:2b:2d:2e", "network": {"id": "54d4d1cd-902e-4cd8-8338-89ae585e66ac", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1733666810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abc1ead3f9a9442ca0b85f152f94fe6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a4a4bdb-82", "ovs_interfaceid": "7a4a4bdb-824a-4f83-8f5a-221146d711b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62346) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2475.649284] env[62346]: DEBUG oslo_concurrency.lockutils [req-87bd4fd6-b81f-41c0-85af-5e04401577eb req-d8f0c870-ac55-4439-99bc-0874702fd127 service nova] Releasing lock "refresh_cache-38c17fd5-70a5-4615-b4b8-dd8bf7f57e28" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2475.866297] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2475.866582] env[62346]: DEBUG nova.virt.vmwareapi.vmops [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] [instance: 38c17fd5-70a5-4615-b4b8-dd8bf7f57e28] Processing image 9feb52a6-5366-4257-bc23-471887ce1370 {{(pid=62346) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2475.866800] env[62346]: DEBUG oslo_concurrency.lockutils [None req-3590c128-46fa-4211-8f41-e136621b747f tempest-DeleteServersTestJSON-1150526204 tempest-DeleteServersTestJSON-1150526204-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9feb52a6-5366-4257-bc23-471887ce1370/9feb52a6-5366-4257-bc23-471887ce1370.vmdk" {{(pid=62346) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
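[editor's note] Throughout this section the image-cache work is guarded by lockutils locks taken with the external flag, so concurrent builds on the same host cannot fetch or convert the same cached image twice. A minimal sketch of that idiom, assuming lock_path='/tmp'; the log locks full datastore paths, while the plain image UUID is used here so the external lock maps onto a simple file name:

    # Sketch of the external-lock idiom behind the "Acquired external
    # semaphore" entries above; the body is a placeholder.
    from oslo_concurrency import lockutils

    IMAGE_ID = '9feb52a6-5366-4257-bc23-471887ce1370'

    # external=True adds a cross-process file lock on top of the
    # in-process semaphore, serializing cache work across workers.
    with lockutils.lock(IMAGE_ID, lock_file_prefix='nova-',
                        external=True, lock_path='/tmp'):
        pass  # fetch or reuse the cached image here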